V8 Project
v8::internal::Heap Class Reference (final)

#include <heap.h>

+ Collaboration diagram for v8::internal::Heap:

Classes

struct  Chunk
 
class  DevToolsTraceEventScope
 
class  ExternalMemoryAccounting
 
class  ExternalStringTable
 
struct  LimitsCompuatationResult
 

Public Types

enum class  HeapGrowingMode { kSlow , kConservative , kMinimal , kDefault }
 
enum  HeapState {
  NOT_IN_GC , SCAVENGE , MARK_COMPACT , MINOR_MARK_SWEEP ,
  TEAR_DOWN
}
 
enum class  StackScanMode { kNone , kFull , kSelective }
 
enum class  OldGenerationExpansionNotificationOrigin { kFromClientHeap , kFromSameHeap }
 
enum class  IterateRootsMode { kMainIsolate , kClientIsolate }
 
enum class  SweepingForcedFinalizationMode { kUnifiedHeap , kV8Only }
 
using Reservation = std::vector< Chunk >
 
using GetExternallyAllocatedMemoryInBytesCallback = v8::Isolate::GetExternallyAllocatedMemoryInBytesCallback
 

Public Member Functions

V8_EXPORT_PRIVATE void FatalProcessOutOfMemory (const char *location)
 
StackScanMode ConservativeStackScanningModeForMinorGC () const
 
StackScanMode ConservativeStackScanningModeForMajorGC () const
 
bool ShouldUsePrecisePinningForMinorGC () const
 
bool ShouldUsePrecisePinningForMajorGC () const
 
EphemeronRememberedSet * ephemeron_remembered_set ()
 
HeapProfiler * heap_profiler () const
 
void NotifyDeserializationComplete ()
 
void WeakenDescriptorArrays (GlobalHandleVector< DescriptorArray > strong_descriptor_arrays)
 
void NotifyBootstrapComplete ()
 
void NotifyOldGenerationExpansion (LocalHeap *local_heap, AllocationSpace space, MutablePageMetadata *chunk, OldGenerationExpansionNotificationOrigin=OldGenerationExpansionNotificationOrigin::kFromSameHeap)
 
Address * NewSpaceAllocationTopAddress ()
 
Address * NewSpaceAllocationLimitAddress ()
 
Address * OldSpaceAllocationTopAddress ()
 
Address * OldSpaceAllocationLimitAddress ()
 
size_t NewSpaceSize ()
 
size_t NewSpaceCapacity () const
 
size_t NewSpaceTargetCapacity () const
 
template<typename TSlot >
V8_EXPORT_PRIVATE void MoveRange (Tagged< HeapObject > dst_object, TSlot dst_slot, TSlot src_slot, int len, WriteBarrierMode mode)
 
template<typename TSlot >
V8_EXPORT_PRIVATE void CopyRange (Tagged< HeapObject > dst_object, TSlot dst_slot, TSlot src_slot, int len, WriteBarrierMode mode)
 
V8_EXPORT_PRIVATE void CreateFillerObjectAt (Address addr, int size, ClearFreedMemoryMode clear_memory_mode=ClearFreedMemoryMode::kDontClearFreedMemory)
 
void CreateFillerObjectAtBackground (const WritableFreeSpace &free_space)
 
bool CanMoveObjectStart (Tagged< HeapObject > object)
 
bool IsImmovable (Tagged< HeapObject > object)
 
V8_EXPORT_PRIVATE Tagged< FixedArrayBase > LeftTrimFixedArray (Tagged< FixedArrayBase > obj, int elements_to_trim)
 
template<typename Array >
void RightTrimArray (Tagged< Array > object, int new_capacity, int old_capacity)
 
Tagged< Boolean > ToBoolean (bool condition)
 
V8_EXPORT_PRIVATE int NotifyContextDisposed (bool has_dependent_context)
 
void set_native_contexts_list (Tagged< Object > object)
 
Tagged< Object > native_contexts_list () const
 
void set_allocation_sites_list (Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext >> object)
 
Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext > > allocation_sites_list ()
 
void set_dirty_js_finalization_registries_list (Tagged< Object > object)
 
Tagged< Object > dirty_js_finalization_registries_list ()
 
void set_dirty_js_finalization_registries_list_tail (Tagged< Object > object)
 
Tagged< Object > dirty_js_finalization_registries_list_tail ()
 
Address allocation_sites_list_address ()
 
void ForeachAllocationSite (Tagged< Object > list, const std::function< void(Tagged< AllocationSite >)> &visitor)
 
int ms_count () const
 
bool AllowedToBeMigrated (Tagged< Map > map, Tagged< HeapObject > object, AllocationSpace dest)
 
void CheckHandleCount ()
 
void PrintShortHeapStatistics ()
 
void PrintFreeListsStats ()
 
void DumpJSONHeapStatistics (std::stringstream &stream)
 
HeapState gc_state () const
 
V8_EXPORT_PRIVATE void SetGCState (HeapState state)
 
bool IsTearingDown () const
 
bool IsInGC () const
 
bool force_oom () const
 
bool ignore_local_gc_requests () const
 
bool IsAllocationObserverActive () const
 
bool IsGCWithMainThreadStack () const
 
bool IsGCWithStack () const
 
bool CanShortcutStringsDuringGC (GarbageCollector collector) const
 
void CollectGarbageForBackground (LocalHeap *local_heap)
 
void CreateReadOnlyApiObjects ()
 
void CreateMutableApiObjects ()
 
V8_EXPORT_PRIVATE void MemoryPressureNotification (v8::MemoryPressureLevel level, bool is_isolate_locked)
 
void CheckMemoryPressure ()
 
V8_EXPORT_PRIVATE void AddNearHeapLimitCallback (v8::NearHeapLimitCallback, void *data)
 
V8_EXPORT_PRIVATE void RemoveNearHeapLimitCallback (v8::NearHeapLimitCallback callback, size_t heap_limit)
 
V8_EXPORT_PRIVATE void AutomaticallyRestoreInitialHeapLimit (double threshold_percent)
 
V8_EXPORT_PRIVATE void AppendArrayBufferExtension (ArrayBufferExtension *extension)
 
V8_EXPORT_PRIVATE void ResizeArrayBufferExtension (ArrayBufferExtension *extension, int64_t delta)
 
void DetachArrayBufferExtension (ArrayBufferExtension *extension)
 
V8_EXPORT_PRIVATE void ExpandNewSpaceSizeForTesting ()
 
V8_EXPORT_PRIVATE void ReduceNewSpaceSizeForTesting ()
 
IsolateSafepoint * safepoint ()
 
V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs () const
 
void RecordStats (HeapStats *stats)
 
bool MeasureMemory (std::unique_ptr< v8::MeasureMemoryDelegate > delegate, v8::MeasureMemoryExecution execution)
 
std::unique_ptr< v8::MeasureMemoryDelegate > CreateDefaultMeasureMemoryDelegate (v8::Local< v8::Context > context, v8::Local< v8::Promise::Resolver > promise, v8::MeasureMemoryMode mode)
 
void IncrementDeferredCounts (base::Vector< const v8::Isolate::UseCounterFeature > features)
 
int NextScriptId ()
 
int NextDebuggingId ()
 
int NextStackTraceId ()
 
uint32_t GetNextTemplateSerialNumber ()
 
void SetSerializedObjects (Tagged< HeapObject > objects)
 
void SetSerializedGlobalProxySizes (Tagged< FixedArray > sizes)
 
void SetBasicBlockProfilingData (DirectHandle< ArrayList > list)
 
void RememberUnmappedPage (Address page, bool compacted)
 
uint64_t external_memory_hard_limit ()
 
uint64_t external_memory () const
 
V8_EXPORT_PRIVATE uint64_t external_memory_limit_for_interrupt ()
 
V8_EXPORT_PRIVATE uint64_t external_memory_soft_limit ()
 
uint64_t UpdateExternalMemory (int64_t delta)
 
V8_EXPORT_PRIVATE size_t YoungArrayBufferBytes ()
 
V8_EXPORT_PRIVATE size_t OldArrayBufferBytes ()
 
uint64_t backing_store_bytes () const
 
void CompactWeakArrayLists ()
 
V8_EXPORT_PRIVATE void AddRetainedMaps (DirectHandle< NativeContext > context, GlobalHandleVector< Map > maps)
 
void OnMoveEvent (Tagged< HeapObject > source, Tagged< HeapObject > target, int size_in_bytes)
 
bool deserialization_complete () const
 
bool CanSafepoint () const
 
bool HasLowAllocationRate ()
 
bool HasHighFragmentation ()
 
void ActivateMemoryReducerIfNeeded ()
 
V8_EXPORT_PRIVATE bool ShouldOptimizeForMemoryUsage ()
 
V8_EXPORT_PRIVATE bool ShouldOptimizeForBattery () const
 
bool HighMemoryPressure ()
 
bool CollectionRequested ()
 
void CheckCollectionRequested ()
 
void RestoreHeapLimit (size_t heap_limit)
 
void ConfigureHeap (const v8::ResourceConstraints &constraints, v8::CppHeap *cpp_heap)
 
void ConfigureHeapDefault ()
 
void SetUp (LocalHeap *main_thread_local_heap)
 
void SetUpFromReadOnlyHeap (ReadOnlyHeap *ro_heap)
 
void ReplaceReadOnlySpace (SharedReadOnlySpace *shared_ro_space)
 
void SetUpSpaces (LinearAllocationArea &new_allocation_info, LinearAllocationArea &old_allocation_info)
 
void InitializeMainThreadLocalHeap (LocalHeap *main_thread_local_heap)
 
void InitializeHashSeed ()
 
bool CreateReadOnlyHeapObjects ()
 
bool CreateMutableHeapObjects ()
 
void CreateObjectStats ()
 
void StartTearDown ()
 
void TearDownWithSharedHeap ()
 
void TearDown ()
 
bool HasBeenSetUp () const
 
Address NewSpaceTop ()
 
Address NewSpaceLimit ()
 
NewSpace * new_space () const
 
PagedNewSpace * paged_new_space () const
 
SemiSpaceNewSpace * semi_space_new_space () const
 
OldSpace * old_space () const
 
StickySpace * sticky_space () const
 
CodeSpace * code_space () const
 
SharedSpace * shared_space () const
 
OldLargeObjectSpace * lo_space () const
 
CodeLargeObjectSpace * code_lo_space () const
 
SharedLargeObjectSpace * shared_lo_space () const
 
NewLargeObjectSpace * new_lo_space () const
 
ReadOnlySpace * read_only_space () const
 
TrustedSpace * trusted_space () const
 
SharedTrustedSpace * shared_trusted_space () const
 
TrustedLargeObjectSpace * trusted_lo_space () const
 
SharedTrustedLargeObjectSpace * shared_trusted_lo_space () const
 
PagedSpace * shared_allocation_space () const
 
OldLargeObjectSpace * shared_lo_allocation_space () const
 
SharedTrustedSpace * shared_trusted_allocation_space () const
 
SharedTrustedLargeObjectSpace * shared_trusted_lo_allocation_space () const
 
PagedSpace * paged_space (int idx) const
 
Space * space (int idx) const
 
GCTracer * tracer ()
 
const GCTracer * tracer () const
 
MemoryAllocator * memory_allocator ()
 
const MemoryAllocator * memory_allocator () const
 
Isolate * isolate () const
 
bool IsMainThread () const
 
MarkCompactCollector * mark_compact_collector ()
 
MinorMarkSweepCollector * minor_mark_sweep_collector ()
 
Sweeper * sweeper ()
 
ArrayBufferSweeper * array_buffer_sweeper ()
 
const base::AddressRegion & code_region ()
 
CodeRange * code_range ()
 
Address code_range_base ()
 
LocalHeap * main_thread_local_heap ()
 
Heap * AsHeap ()
 
RootsTable & roots_table ()
 
void SetRootMaterializedObjects (Tagged< FixedArray > objects)
 
void SetRootScriptList (Tagged< Object > value)
 
void SetRootNoScriptSharedFunctionInfos (Tagged< Object > value)
 
void SetMessageListeners (Tagged< ArrayList > value)
 
void SetFunctionsMarkedForManualOptimization (Tagged< Object > bytecode)
 
void SetSmiStringCache (Tagged< SmiStringCache > cache)
 
void SetDoubleStringCache (Tagged< DoubleStringCache > cache)
 
StrongRootsEntry * RegisterStrongRoots (const char *label, FullObjectSlot start, FullObjectSlot end)
 
void UnregisterStrongRoots (StrongRootsEntry *entry)
 
void UpdateStrongRoots (StrongRootsEntry *entry, FullObjectSlot start, FullObjectSlot end)
 
void SetBuiltinsConstantsTable (Tagged< FixedArray > cache)
 
void SetDetachedContexts (Tagged< WeakArrayList > detached_contexts)
 
void EnqueueDirtyJSFinalizationRegistry (Tagged< JSFinalizationRegistry > finalization_registry, std::function< void(Tagged< HeapObject > object, ObjectSlot slot, Tagged< Object > target)> gc_notify_updated_slot)
 
MaybeDirectHandle< JSFinalizationRegistry > DequeueDirtyJSFinalizationRegistry ()
 
void RemoveDirtyFinalizationRegistriesOnContext (Tagged< NativeContext > context)
 
bool HasDirtyJSFinalizationRegistries ()
 
void PostFinalizationRegistryCleanupTaskIfNeeded ()
 
void set_is_finalization_registry_cleanup_task_posted (bool posted)
 
bool is_finalization_registry_cleanup_task_posted ()
 
V8_EXPORT_PRIVATE void KeepDuringJob (DirectHandle< HeapObject > target)
 
void ClearKeptObjects ()
 
V8_EXPORT_PRIVATE void EnableInlineAllocation ()
 
V8_EXPORT_PRIVATE void DisableInlineAllocation ()
 
V8_EXPORT_PRIVATE void CollectGarbage (AllocationSpace space, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags, PerformHeapLimitCheck check_heap_limit_reached=PerformHeapLimitCheck::kYes)
 
V8_EXPORT_PRIVATE void CollectAllGarbage (GCFlags gc_flags, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
 
V8_EXPORT_PRIVATE void CollectAllAvailableGarbage (GarbageCollectionReason gc_reason)
 
V8_EXPORT_PRIVATE void PreciseCollectAllGarbage (GCFlags gc_flags, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
 
V8_EXPORT_PRIVATE bool CollectGarbageShared (LocalHeap *local_heap, GarbageCollectionReason gc_reason)
 
V8_EXPORT_PRIVATE bool CollectGarbageFromAnyThread (LocalHeap *local_heap, GarbageCollectionReason gc_reason=GarbageCollectionReason::kBackgroundAllocationFailure)
 
V8_EXPORT_PRIVATE void CollectGarbageWithRetry (AllocationSpace space, GCFlags gc_flags, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags)
 
void HandleExternalMemoryInterrupt ()
 
void SetGetExternallyAllocatedMemoryInBytesCallback (GetExternallyAllocatedMemoryInBytesCallback callback)
 
void HandleGCRequest ()
 
void IterateRoots (RootVisitor *v, base::EnumSet< SkipRoot > options, IterateRootsMode roots_mode=IterateRootsMode::kMainIsolate)
 
void IterateRootsIncludingClients (RootVisitor *v, base::EnumSet< SkipRoot > options)
 
void IterateSmiRoots (RootVisitor *v)
 
void IterateWeakRoots (RootVisitor *v, base::EnumSet< SkipRoot > options)
 
void IterateWeakGlobalHandles (RootVisitor *v)
 
void IterateBuiltins (RootVisitor *v)
 
void IterateStackRoots (RootVisitor *v)
 
void IterateConservativeStackRoots (RootVisitor *root_visitor, IterateRootsMode roots_mode=IterateRootsMode::kMainIsolate)
 
void IterateConservativeStackRoots (::heap::base::StackVisitor *stack_visitor, StackScanMode stack_scan_mode)
 
void IterateRootsForPrecisePinning (RootVisitor *visitor)
 
uint8_t * IsMarkingFlagAddress ()
 
uint8_t * IsMinorMarkingFlagAddress ()
 
void ClearRecordedSlotRange (Address start, Address end)
 
GCFlags GCFlagsForIncrementalMarking ()
 
V8_EXPORT_PRIVATE void StartIncrementalMarking (GCFlags gc_flags, GarbageCollectionReason gc_reason, GCCallbackFlags gc_callback_flags=GCCallbackFlags::kNoGCCallbackFlags, GarbageCollector collector=GarbageCollector::MARK_COMPACTOR)
 
V8_EXPORT_PRIVATE void StartIncrementalMarkingOnInterrupt ()
 
V8_EXPORT_PRIVATE void StartIncrementalMarkingIfAllocationLimitIsReached (LocalHeap *local_heap, GCFlags gc_flags, GCCallbackFlags gc_callback_flags=GCCallbackFlags::kNoGCCallbackFlags)
 
V8_EXPORT_PRIVATE void FinalizeIncrementalMarkingAtomically (GarbageCollectionReason gc_reason)
 
V8_EXPORT_PRIVATE void FinalizeIncrementalMarkingAtomicallyIfRunning (GarbageCollectionReason gc_reason)
 
V8_EXPORT_PRIVATE void CompleteSweepingFull ()
 
void CompleteSweepingYoung ()
 
void EnsureSweepingCompletedForObject (Tagged< HeapObject > object)
 
IncrementalMarking * incremental_marking () const
 
ConcurrentMarking * concurrent_marking () const
 
void NotifyObjectLayoutChange (Tagged< HeapObject > object, const DisallowGarbageCollection &, InvalidateRecordedSlots invalidate_recorded_slots, InvalidateExternalPointerSlots invalidate_external_pointer_slots, int new_size=0)
 
void NotifyObjectSizeChange (Tagged< HeapObject >, int old_size, int new_size, ClearRecordedSlots clear_recorded_slots)
 
void SetConstructStubCreateDeoptPCOffset (int pc_offset)
 
void SetConstructStubInvokeDeoptPCOffset (int pc_offset)
 
void SetDeoptPCOffsetAfterAdaptShadowStack (int pc_offset)
 
void SetInterpreterEntryReturnPCOffset (int pc_offset)
 
void DeoptMarkedAllocationSites ()
 
v8::CppHeap * cpp_heap () const
 
std::optional< StackState > overridden_stack_state () const
 
V8_EXPORT_PRIVATE void SetStackStart ()
 
V8_EXPORT_PRIVATE ::heap::base::Stack & stack ()
 
V8_EXPORT_PRIVATE const ::heap::base::Stack & stack () const
 
V8_EXPORT_PRIVATE void SetEmbedderRootsHandler (EmbedderRootsHandler *handler)
 
EmbedderRootsHandler * GetEmbedderRootsHandler () const
 
void RegisterExternalString (Tagged< String > string)
 
V8_EXPORT_PRIVATE void UpdateExternalString (Tagged< String > string, size_t old_payload, size_t new_payload)
 
void FinalizeExternalString (Tagged< String > string)
 
bool InOldSpace (Tagged< Object > object)
 
V8_EXPORT_PRIVATE bool Contains (Tagged< HeapObject > value) const
 
V8_EXPORT_PRIVATE bool ContainsCode (Tagged< HeapObject > value) const
 
V8_EXPORT_PRIVATE bool SharedHeapContains (Tagged< HeapObject > value) const
 
V8_EXPORT_PRIVATE bool MustBeInSharedOldSpace (Tagged< HeapObject > value)
 
V8_EXPORT_PRIVATE bool InSpace (Tagged< HeapObject > value, AllocationSpace space) const
 
V8_EXPORT_PRIVATE bool InSpaceSlow (Address addr, AllocationSpace space) const
 
V8_EXPORT_PRIVATE bool CanReferenceHeapObject (Tagged< HeapObject > obj)
 
size_t NumberOfTrackedHeapObjectTypes ()
 
size_t ObjectCountAtLastGC (size_t index)
 
size_t ObjectSizeAtLastGC (size_t index)
 
bool GetObjectTypeName (size_t index, const char **object_type, const char **object_sub_type)
 
size_t NumberOfNativeContexts ()
 
size_t NumberOfDetachedContexts ()
 
void CollectCodeStatistics ()
 
V8_EXPORT_PRIVATE size_t MaxReserved () const
 
size_t MaxSemiSpaceSize ()
 
size_t InitialSemiSpaceSize ()
 
size_t MaxOldGenerationSize ()
 
size_t Capacity ()
 
V8_EXPORT_PRIVATE size_t OldGenerationCapacity () const
 
base::Mutex * heap_expansion_mutex ()
 
size_t CommittedMemory ()
 
size_t CommittedOldGenerationMemory ()
 
size_t CommittedMemoryExecutable ()
 
size_t CommittedPhysicalMemory ()
 
size_t MaximumCommittedMemory ()
 
void UpdateMaximumCommitted ()
 
size_t Available ()
 
V8_EXPORT_PRIVATE size_t SizeOfObjects ()
 
V8_EXPORT_PRIVATE size_t TotalGlobalHandlesSize ()
 
V8_EXPORT_PRIVATE size_t UsedGlobalHandlesSize ()
 
void UpdateSurvivalStatistics (int start_new_space_size)
 
void IncrementPromotedObjectsSize (size_t object_size)
 
size_t promoted_objects_size ()
 
void IncrementNewSpaceSurvivingObjectSize (size_t object_size)
 
size_t new_space_surviving_object_size ()
 
size_t SurvivedYoungObjectSize ()
 
void IncrementNodesDiedInNewSpace (int count)
 
void IncrementNodesCopiedInNewSpace ()
 
void IncrementNodesPromoted ()
 
void IncrementYoungSurvivorsCounter (size_t survived)
 
V8_EXPORT_PRIVATE size_t NewSpaceAllocationCounter () const
 
void SetNewSpaceAllocationCounterForTesting (size_t new_value)
 
void UpdateOldGenerationAllocationCounter ()
 
size_t OldGenerationAllocationCounter ()
 
size_t EmbedderAllocationCounter () const
 
void set_old_generation_allocation_counter_at_last_gc (size_t new_value)
 
int gc_count () const
 
bool is_current_gc_forced () const
 
GarbageCollector current_or_last_garbage_collector () const
 
bool ShouldCurrentGCKeepAgesUnchanged () const
 
V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects () const
 
V8_EXPORT_PRIVATE size_t OldGenerationWastedBytes () const
 
V8_EXPORT_PRIVATE size_t OldGenerationConsumedBytes () const
 
V8_EXPORT_PRIVATE size_t YoungGenerationSizeOfObjects () const
 
V8_EXPORT_PRIVATE size_t YoungGenerationWastedBytes () const
 
V8_EXPORT_PRIVATE size_t YoungGenerationConsumedBytes () const
 
V8_EXPORT_PRIVATE size_t EmbedderSizeOfObjects () const
 
V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects () const
 
V8_EXPORT_PRIVATE size_t GlobalWastedBytes () const
 
V8_EXPORT_PRIVATE size_t GlobalConsumedBytes () const
 
V8_EXPORT_PRIVATE size_t OldGenerationConsumedBytesAtLastGC () const
 
V8_EXPORT_PRIVATE size_t GlobalConsumedBytesAtLastGC () const
 
bool AllocationLimitOvershotByLargeMargin () const
 
V8_EXPORT_PRIVATE int MaxRegularHeapObjectSize (AllocationType allocation)
 
void AddGCPrologueCallback (v8::Isolate::GCCallbackWithData callback, GCType gc_type_filter, void *data)
 
void RemoveGCPrologueCallback (v8::Isolate::GCCallbackWithData callback, void *data)
 
void AddGCEpilogueCallback (v8::Isolate::GCCallbackWithData callback, GCType gc_type_filter, void *data)
 
void RemoveGCEpilogueCallback (v8::Isolate::GCCallbackWithData callback, void *data)
 
void CallGCPrologueCallbacks (GCType gc_type, GCCallbackFlags flags, GCTracer::Scope::ScopeId scope_id)
 
void CallGCEpilogueCallbacks (GCType gc_type, GCCallbackFlags flags, GCTracer::Scope::ScopeId scope_id)
 
V8_EXPORT_PRIVATE Tagged< HeapObject > PrecedeWithFiller (Tagged< HeapObject > object, int filler_size)
 
V8_EXPORT_PRIVATE Tagged< HeapObject > PrecedeWithFillerBackground (Tagged< HeapObject > object, int filler_size)
 
V8_WARN_UNUSED_RESULT Tagged< HeapObject > AlignWithFillerBackground (Tagged< HeapObject > object, int object_size, int allocation_size, AllocationAlignment alignment)
 
V8_EXPORT_PRIVATE void * AllocateExternalBackingStore (const std::function< void *(size_t)> &allocate, size_t byte_length)
 
void AddAllocationObserversToAllSpaces (AllocationObserver *observer, AllocationObserver *new_space_observer)
 
void RemoveAllocationObserversFromAllSpaces (AllocationObserver *observer, AllocationObserver *new_space_observer)
 
bool IsPendingAllocation (Tagged< HeapObject > object)
 
bool IsPendingAllocation (Tagged< Object > object)
 
V8_EXPORT_PRIVATE void PublishMainThreadPendingAllocations ()
 
V8_EXPORT_PRIVATE void AddHeapObjectAllocationTracker (HeapObjectAllocationTracker *tracker)
 
V8_EXPORT_PRIVATE void RemoveHeapObjectAllocationTracker (HeapObjectAllocationTracker *tracker)
 
bool has_heap_object_allocation_tracker () const
 
V8_EXPORT_PRIVATE Tagged< Code > FindCodeForInnerPointer (Address inner_pointer)
 
Tagged< GcSafeCode > GcSafeFindCodeForInnerPointer (Address inner_pointer)
 
std::optional< Tagged< GcSafeCode > > GcSafeTryFindCodeForInnerPointer (Address inner_pointer)
 
std::optional< Tagged< InstructionStream > > GcSafeTryFindInstructionStreamForInnerPointer (Address inner_pointer)
 
std::optional< Tagged< Code > > TryFindCodeForInnerPointerForPrinting (Address inner_pointer)
 
bool GcSafeInstructionStreamContains (Tagged< InstructionStream > instruction_stream, Address addr)
 
bool sweeping_in_progress () const
 
bool sweeping_in_progress_for_space (AllocationSpace space) const
 
bool minor_sweeping_in_progress () const
 
bool major_sweeping_in_progress () const
 
void FinishSweepingIfOutOfWork ()
 
V8_EXPORT_PRIVATE void EnsureSweepingCompleted (SweepingForcedFinalizationMode mode)
 
void EnsureYoungSweepingCompleted ()
 
void EnsureQuarantinedPagesSweepingCompleted ()
 
void * GetRandomMmapAddr ()
 
V8_EXPORT_PRIVATE void MakeHeapIterable ()
 
V8_EXPORT_PRIVATE void Unmark ()
 
V8_EXPORT_PRIVATE void DeactivateMajorGCInProgressFlag ()
 
V8_EXPORT_PRIVATE void FreeLinearAllocationAreas ()
 
V8_EXPORT_PRIVATE void FreeMainThreadLinearAllocationAreas ()
 
V8_EXPORT_PRIVATE bool CanPromoteYoungAndExpandOldGeneration (size_t size) const
 
V8_EXPORT_PRIVATE bool CanExpandOldGeneration (size_t size) const
 
V8_EXPORT_PRIVATE bool IsOldGenerationExpansionAllowed (size_t size, const base::MutexGuard &expansion_mutex_witness) const
 
bool ShouldReduceMemory () const
 
bool IsLastResortGC ()
 
MarkingState * marking_state ()
 
NonAtomicMarkingState * non_atomic_marking_state ()
 
PretenuringHandler * pretenuring_handler ()
 
bool IsInlineAllocationEnabled () const
 
V8_EXPORT_PRIVATE uint64_t AllocatedExternalMemorySinceMarkCompact () const
 
std::shared_ptr< v8::TaskRunner > GetForegroundTaskRunner (TaskPriority priority=TaskPriority::kUserBlocking) const
 
bool ShouldUseBackgroundThreads () const
 
bool ShouldUseIncrementalMarking () const
 
HeapAllocator * allocator ()
 
const HeapAllocator * allocator () const
 
bool use_new_space () const
 
bool IsNewSpaceAllowedToGrowAboveTargetCapacity () const
 
template<typename TSlot >
void MoveRange (Tagged< HeapObject > dst_object, const TSlot dst_slot, const TSlot src_slot, int len, WriteBarrierMode mode)
 
template<typename TSlot >
void CopyRange (Tagged< HeapObject > dst_object, const TSlot dst_slot, const TSlot src_slot, int len, WriteBarrierMode mode)
 

Static Public Member Functions

static V8_EXPORT_PRIVATE size_t DefaultInitialOldGenerationSize ()
 
static V8_EXPORT_PRIVATE size_t OldGenerationLowMemory ()
 
static V8_EXPORT_PRIVATE size_t HeapLimitMultiplier ()
 
static size_t DefaultMinSemiSpaceSize ()
 
static V8_EXPORT_PRIVATE size_t DefaultMaxSemiSpaceSize ()
 
static size_t OldGenerationToSemiSpaceRatio ()
 
static size_t OldGenerationToSemiSpaceRatioLowMemory ()
 
static V8_EXPORT_PRIVATE size_t DefaulMinHeapSize ()
 
static V8_EXPORT_PRIVATE size_t DefaulMaxHeapSize ()
 
static V8_EXPORT_PRIVATE int GetMaximumFillToAlign (AllocationAlignment alignment)
 
static V8_EXPORT_PRIVATE int GetFillToAlign (Address address, AllocationAlignment alignment)
 
static size_t GetCodeRangeReservedAreaSize ()
 
static bool IsValidAllocationSpace (AllocationSpace space)
 
static bool IsYoungGenerationCollector (GarbageCollector collector)
 
static V8_EXPORT_PRIVATE bool IsFreeSpaceValid (const FreeSpace *object)
 
static GarbageCollector YoungGenerationCollector ()
 
static void CopyBlock (Address dst, Address src, int byte_size)
 
static V8_EXPORT_PRIVATE bool IsLargeObject (Tagged< HeapObject > object)
 
static void InitializeOncePerProcess ()
 
static int InsertIntoRememberedSetFromCode (MutablePageMetadata *chunk, size_t slot_offset)
 
static V8_EXPORT_PRIVATE void NotifyObjectLayoutChangeDone (Tagged< HeapObject > object)
 
static Tagged< String > UpdateYoungReferenceInExternalStringTableEntry (Heap *heap, FullObjectSlot pointer)
 
static bool InFromPage (Tagged< Object > object)
 
static bool InFromPage (Tagged< MaybeObject > object)
 
static bool InFromPage (Tagged< HeapObject > heap_object)
 
static bool InToPage (Tagged< Object > object)
 
static bool InToPage (Tagged< MaybeObject > object)
 
static bool InToPage (Tagged< HeapObject > heap_object)
 
static Heap * FromWritableHeapObject (Tagged< HeapObject > obj)
 
static V8_EXPORT_PRIVATE size_t AllocatorLimitOnMaxOldGenerationSize ()
 
static V8_EXPORT_PRIVATE size_t HeapSizeFromPhysicalMemory (uint64_t physical_memory)
 
static V8_EXPORT_PRIVATE void GenerationSizesFromHeapSize (size_t heap_size, size_t *young_generation_size, size_t *old_generation_size)
 
static V8_EXPORT_PRIVATE size_t YoungGenerationSizeFromOldGenerationSize (size_t old_generation_size)
 
static V8_EXPORT_PRIVATE size_t YoungGenerationSizeFromSemiSpaceSize (size_t semi_space_size)
 
static V8_EXPORT_PRIVATE size_t SemiSpaceSizeFromYoungGenerationSize (size_t young_generation_size)
 
static V8_EXPORT_PRIVATE size_t MinYoungGenerationSize ()
 
static V8_EXPORT_PRIVATE size_t MinOldGenerationSize ()
 
static V8_EXPORT_PRIVATE size_t MaxOldGenerationSizeFromPhysicalMemory (uint64_t physical_memory)
 

Static Public Attributes

static const int kPointerMultiplier = kTaggedSize / 4
 
static constexpr size_t kPhysicalMemoryToOldGenerationRatio = 4
 
static constexpr size_t kNewLargeObjectSpaceToSemiSpaceRatio = 1
 
static const int kTraceRingBufferSize = 512
 
static const int kStacktraceBufferSize = 512
 
static const int kMinObjectSizeInTaggedWords = 2
 

Private Types

enum class  VerifyNoSlotsRecorded { kYes , kNo }
 
enum class  ResizeNewSpaceMode { kShrink , kGrow , kNone }
 
enum class  IncrementalMarkingLimit { kNoLimit , kSoftLimit , kHardLimit , kFallbackForEmbedderLimit }
 
enum  AllocationRetryMode { kLightRetry , kRetryOrFail }
 
using ExternalStringTableUpdaterCallback = Tagged< String >(*)(Heap *heap, FullObjectSlot pointer)
 

Private Member Functions

void AttachCppHeap (v8::CppHeap *cpp_heap)
 
 Heap ()
 
 ~Heap ()
 
 Heap (const Heap &)=delete
 
Heap & operator= (const Heap &)=delete
 
int NumberOfScavengeTasks ()
 
GarbageCollector SelectGarbageCollector (AllocationSpace space, GarbageCollectionReason gc_reason, const char **reason) const
 
void CheckHeapLimitReached ()
 
bool ReachedHeapLimit ()
 
void MakeLinearAllocationAreasIterable ()
 
void MarkSharedLinearAllocationAreasBlack ()
 
void UnmarkSharedLinearAllocationAreas ()
 
void FreeSharedLinearAllocationAreasAndResetFreeLists ()
 
void PerformGarbageCollection (GarbageCollector collector, GarbageCollectionReason gc_reason, const char *collector_reason)
 
void PerformHeapVerification ()
 
std::vector< Isolate * > PauseConcurrentThreadsInClients (GarbageCollector collector)
 
void ResumeConcurrentThreadsInClients (std::vector< Isolate * > paused_clients)
 
void StaticRootsEnsureAllocatedSize (DirectHandle< HeapObject > obj, int required)
 
bool CreateEarlyReadOnlyMapsAndObjects ()
 
bool CreateImportantReadOnlyObjects ()
 
bool CreateLateReadOnlyNonJSReceiverMaps ()
 
bool CreateLateReadOnlyJSReceiverMaps ()
 
bool CreateReadOnlyObjects ()
 
void CreateInternalAccessorInfoObjects ()
 
void CreateInitialMutableObjects ()
 
void CreateFillerObjectAtRaw (const WritableFreeSpace &free_space, ClearFreedMemoryMode clear_memory_mode, ClearRecordedSlots clear_slots_mode, VerifyNoSlotsRecorded verify_no_slots_recorded)
 
void ResetAllAllocationSitesDependentCode (AllocationType allocation)
 
void EvaluateOldSpaceLocalPretenuring (uint64_t size_of_objects_before_gc)
 
void ReportStatisticsAfterGC ()
 
void ActivateMemoryReducerIfNeededOnMainThread ()
 
void ShrinkOldGenerationAllocationLimitIfNotConfigured ()
 
void EnsureMinimumRemainingAllocationLimit (size_t at_least_remaining)
 
double ComputeMutatorUtilization (const char *tag, double mutator_speed, std::optional< double > gc_speed)
 
bool HasLowYoungGenerationAllocationRate ()
 
bool HasLowOldGenerationAllocationRate ()
 
bool HasLowEmbedderAllocationRate ()
 
ResizeNewSpaceMode ShouldResizeNewSpace ()
 
void StartResizeNewSpace ()
 
void ResizeNewSpace ()
 
void ExpandNewSpaceSize ()
 
void ReduceNewSpaceSize ()
 
void PrintMaxMarkingLimitReached ()
 
void PrintMaxNewSpaceSizeReached ()
 
int NextStressMarkingLimit ()
 
void AddToRingBuffer (const char *string)
 
void GetFromRingBuffer (char *buffer)
 
void CompactRetainedMaps (Tagged< WeakArrayList > retained_maps)
 
void CollectGarbageOnMemoryPressure ()
 
void EagerlyFreeExternalMemoryAndWasmCode ()
 
bool InvokeNearHeapLimitCallback ()
 
void InvokeIncrementalMarkingPrologueCallbacks ()
 
void InvokeIncrementalMarkingEpilogueCallbacks ()
 
Tagged< GcSafeCode > GcSafeGetCodeFromInstructionStream (Tagged< HeapObject > instruction_stream, Address inner_pointer)
 
Tagged< Map > GcSafeMapOfHeapObject (Tagged< HeapObject > object)
 
void GarbageCollectionPrologue (GarbageCollectionReason gc_reason, const v8::GCCallbackFlags gc_callback_flags)
 
void GarbageCollectionPrologueInSafepoint (GarbageCollector collector)
 
void GarbageCollectionEpilogue (GarbageCollector collector)
 
void GarbageCollectionEpilogueInSafepoint (GarbageCollector collector)
 
void MarkCompact ()
 
void MinorMarkSweep ()
 
void MarkCompactPrologue ()
 
void MarkCompactEpilogue ()
 
void Scavenge ()
 
void UpdateYoungReferencesInExternalStringTable (ExternalStringTableUpdaterCallback updater_func)
 
void UpdateReferencesInExternalStringTable (ExternalStringTableUpdaterCallback updater_func)
 
void ProcessAllWeakReferences (WeakObjectRetainer *retainer)
 
void ProcessNativeContexts (WeakObjectRetainer *retainer)
 
void ProcessAllocationSites (WeakObjectRetainer *retainer)
 
void ProcessDirtyJSFinalizationRegistries (WeakObjectRetainer *retainer)
 
void ProcessWeakListRoots (WeakObjectRetainer *retainer)
 
size_t OldGenerationSpaceAvailable ()
 
void UpdateTotalGCTime (base::TimeDelta duration)
 
bool IsIneffectiveMarkCompact (size_t old_generation_size, double mutator_utilization)
 
void CheckIneffectiveMarkCompact (size_t old_generation_size, double mutator_utilization)
 
void IncrementExternalBackingStoreBytes (ExternalBackingStoreType type, size_t amount)
 
void DecrementExternalBackingStoreBytes (ExternalBackingStoreType type, size_t amount)
 
MemoryReducer * memory_reducer ()
 
V8_EXPORT_PRIVATE bool ShouldOptimizeForLoadTime () const
 
void NotifyLoadingStarted ()
 
void NotifyLoadingEnded ()
 
size_t old_generation_allocation_limit () const
 
size_t global_allocation_limit () const
 
bool using_initial_limit () const
 
void set_using_initial_limit (bool value)
 
size_t max_old_generation_size () const
 
size_t min_old_generation_size () const
 
void SetOldGenerationAndGlobalMaximumSize (size_t max_old_generation_size)
 
void SetOldGenerationAndGlobalAllocationLimit (size_t new_old_generation_allocation_limit, size_t new_global_allocation_limit, const char *reason=__builtin_FUNCTION())
 
void ResetOldGenerationAndGlobalAllocationLimit ()
 
bool always_allocate () const
 
bool ShouldExpandOldGenerationOnSlowAllocation (LocalHeap *local_heap, AllocationOrigin origin)
 
bool ShouldExpandYoungGenerationOnSlowAllocation (size_t allocation_size)
 
HeapGrowingMode CurrentHeapGrowingMode ()
 
double PercentToOldGenerationLimit () const
 
double PercentToGlobalMemoryLimit () const
 
IncrementalMarkingLimit IncrementalMarkingLimitReached ()
 
bool ShouldStressCompaction () const
 
size_t GlobalMemoryAvailable ()
 
void RecomputeLimits (GarbageCollector collector, base::TimeTicks time)
 
void RecomputeLimitsAfterLoadingIfNeeded ()
 
V8_EXPORT_PRIVATE void StartMinorMSConcurrentMarkingIfNeeded ()
 
bool MinorMSSizeTaskTriggerReached () const
 
MinorGCJob * minor_gc_job ()
 
V8_WARN_UNUSED_RESULT AllocationResult AllocateMap (AllocationType allocation_type, InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND, int inobject_properties=0)
 
V8_WARN_UNUSED_RESULT AllocationResult AllocateRaw (int size_in_bytes, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
 
template<AllocationRetryMode mode>
V8_WARN_UNUSED_RESULT Tagged< HeapObject > AllocateRawWith (int size, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
 
V8_WARN_UNUSED_RESULT Address AllocateRawOrFail (int size, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
 
V8_WARN_UNUSED_RESULT AllocationResult Allocate (DirectHandle< Map > map, AllocationType allocation)
 
V8_WARN_UNUSED_RESULT AllocationResult AllocatePartialMap (InstanceType instance_type, int instance_size)
 
void FinalizePartialMap (Tagged< Map > map)
 
void set_force_oom (bool value)
 
void set_force_gc_on_next_allocation ()
 
bool IsPendingAllocationInternal (Tagged< HeapObject > object)
 
std::vector< Handle< NativeContext > > FindAllNativeContexts ()
 
std::vector< Tagged< WeakArrayList > > FindAllRetainedMaps ()
 
MemoryMeasurement * memory_measurement ()
 
AllocationType allocation_type_for_in_place_internalizable_strings () const
 
bool IsStressingScavenge ()
 
void SetIsMarkingFlag (bool value)
 
void SetIsMinorMarkingFlag (bool value)
 
size_t PromotedSinceLastGC ()
 
 FRIEND_TEST (SpacesTest, InlineAllocationObserverCadence)
 
 FRIEND_TEST (SpacesTest, AllocationObserver)
 

Static Private Member Functions

static bool IsRegularObjectAllocation (AllocationType allocation)
 
static LimitsCompuatationResult ComputeNewAllocationLimits (Heap *heap)
 

Private Attributes

ExternalMemoryAccounting external_memory_
 
Isolate * isolate_ = nullptr
 
HeapAllocator * heap_allocator_ = nullptr
 
size_t code_range_size_ = 0
 
size_t max_semi_space_size_ = 0
 
size_t min_semi_space_size_ = 0
 
size_t initial_semispace_size_ = 0
 
size_t min_old_generation_size_ = 0
 
std::atomic< size_t > max_old_generation_size_ {0}
 
size_t min_global_memory_size_ = 0
 
size_t max_global_memory_size_ = 0
 
size_t initial_max_old_generation_size_ = 0
 
size_t initial_max_old_generation_size_threshold_ = 0
 
size_t initial_old_generation_size_ = 0
 
std::atomic< bool > using_initial_limit_ = true
 
bool initial_size_overwritten_ = false
 
bool preconfigured_old_generation_size_ = false
 
size_t maximum_committed_ = 0
 
size_t old_generation_capacity_after_bootstrap_ = 0
 
std::atomic< uint64_t > backing_store_bytes_ {0}
 
size_t survived_since_last_expansion_ = 0
 
std::atomic< size_t > always_allocate_scope_count_ {0}
 
std::atomic< v8::MemoryPressureLevel > memory_pressure_level_
 
std::vector< std::pair< v8::NearHeapLimitCallback, void * > > near_heap_limit_callbacks_
 
int contexts_disposed_ = 0
 
NewSpace * new_space_ = nullptr
 
OldSpace * old_space_ = nullptr
 
CodeSpace * code_space_ = nullptr
 
SharedSpace * shared_space_ = nullptr
 
OldLargeObjectSpace * lo_space_ = nullptr
 
CodeLargeObjectSpace * code_lo_space_ = nullptr
 
NewLargeObjectSpace * new_lo_space_ = nullptr
 
SharedLargeObjectSpace * shared_lo_space_ = nullptr
 
ReadOnlySpace * read_only_space_ = nullptr
 
TrustedSpace * trusted_space_ = nullptr
 
SharedTrustedSpace * shared_trusted_space_ = nullptr
 
TrustedLargeObjectSpace * trusted_lo_space_ = nullptr
 
SharedTrustedLargeObjectSpace * shared_trusted_lo_space_ = nullptr
 
PagedSpace * shared_allocation_space_ = nullptr
 
OldLargeObjectSpace * shared_lo_allocation_space_ = nullptr
 
SharedTrustedSpace * shared_trusted_allocation_space_ = nullptr
 
SharedTrustedLargeObjectSpace * shared_trusted_lo_allocation_space_ = nullptr
 
std::unique_ptr< Space > space_ [LAST_SPACE+1]
 
LocalHeap * main_thread_local_heap_ = nullptr
 
std::atomic< HeapState > gc_state_ {NOT_IN_GC}
 
int stress_marking_percentage_ = 0
 
StressScavengeObserver * stress_scavenge_observer_ = nullptr
 
std::atomic< double > max_marking_limit_reached_ = 0.0
 
unsigned int ms_count_ = 0
 
unsigned int gc_count_ = 0
 
int consecutive_ineffective_mark_compacts_ = 0
 
uintptr_t mmap_region_base_ = 0
 
int remembered_unmapped_pages_index_ = 0
 
Address remembered_unmapped_pages_ [kRememberedUnmappedPages]
 
std::atomic< size_t > old_generation_allocation_limit_ {0}
 
std::atomic< size_t > global_allocation_limit_ {0}
 
std::atomic< Address > native_contexts_list_
 
Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext > > allocation_sites_list_ = Smi::zero()
 
Tagged< Object > dirty_js_finalization_registries_list_ = Smi::zero()
 
Tagged< Object > dirty_js_finalization_registries_list_tail_ = Smi::zero()
 
GCCallbacks gc_prologue_callbacks_
 
GCCallbacks gc_epilogue_callbacks_
 
GetExternallyAllocatedMemoryInBytesCallback external_memory_callback_
 
base::SmallVector< v8::Isolate::UseCounterFeature, 8 > deferred_counters_
 
size_t promoted_objects_size_ = 0
 
double promotion_ratio_ = 0.0
 
double promotion_rate_ = 0.0
 
size_t new_space_surviving_object_size_ = 0
 
size_t previous_new_space_surviving_object_size_ = 0
 
double new_space_surviving_rate_ = 0.0
 
int nodes_died_in_new_space_ = 0
 
int nodes_copied_in_new_space_ = 0
 
int nodes_promoted_ = 0
 
base::TimeDelta total_gc_time_ms_
 
double last_gc_time_ = 0.0
 
std::unique_ptr< GCTracer > tracer_
 
std::unique_ptr< Sweeper > sweeper_
 
std::unique_ptr< MarkCompactCollector > mark_compact_collector_
 
std::unique_ptr< MinorMarkSweepCollector > minor_mark_sweep_collector_
 
std::unique_ptr< ScavengerCollector > scavenger_collector_
 
std::unique_ptr< ArrayBufferSweeper > array_buffer_sweeper_
 
std::unique_ptr< MemoryAllocator > memory_allocator_
 
std::unique_ptr< IncrementalMarking > incremental_marking_
 
std::unique_ptr< ConcurrentMarking > concurrent_marking_
 
std::unique_ptr< MemoryMeasurement > memory_measurement_
 
std::unique_ptr< MemoryReducer > memory_reducer_
 
std::unique_ptr< ObjectStats > live_object_stats_
 
std::unique_ptr< ObjectStats > dead_object_stats_
 
std::unique_ptr< MinorGCJob > minor_gc_job_
 
std::unique_ptr< AllocationObserver > stress_concurrent_allocation_observer_
 
std::unique_ptr< AllocationTrackerForDebugging > allocation_tracker_for_debugging_
 
std::unique_ptr< EphemeronRememberedSet > ephemeron_remembered_set_
 
std::unique_ptr< HeapProfiler > heap_profiler_
 
std::shared_ptr< v8::TaskRunner > task_runner_
 
std::unique_ptr< CodeRange > code_range_
 
std::unique_ptr< CppHeap > owning_cpp_heap_
 
v8::CppHeap * cpp_heap_ = nullptr
 
EmbedderRootsHandler * embedder_roots_handler_
 
StackState embedder_stack_state_ = StackState::kMayContainHeapPointers
 
std::optional< EmbedderStackStateOrigin > embedder_stack_state_origin_
 
StrongRootsEntry * strong_roots_head_ = nullptr
 
base::Mutex strong_roots_mutex_
 
base::Mutex heap_expansion_mutex_
 
bool need_to_remove_stress_concurrent_allocation_observer_ = false
 
size_t new_space_allocation_counter_ = 0
 
size_t old_generation_allocation_counter_at_last_gc_ = 0
 
size_t old_generation_size_at_last_gc_ {0}
 
size_t old_generation_wasted_at_last_gc_ {0}
 
size_t embedder_size_at_last_gc_ = 0
 
char trace_ring_buffer_ [kTraceRingBufferSize]
 
bool ring_buffer_full_ = false
 
size_t ring_buffer_end_ = 0
 
bool configured_ = false
 
GCFlags current_gc_flags_ = GCFlag::kNoFlags
 
GCCallbackFlags current_gc_callback_flags_
 
std::unique_ptr< IsolateSafepoint > safepoint_
 
bool is_current_gc_forced_ = false
 
bool is_current_gc_for_heap_profiler_ = false
 
GarbageCollector current_or_last_garbage_collector_
 
ExternalStringTable external_string_table_
 
const AllocationType allocation_type_for_in_place_internalizable_strings_
 
std::unique_ptr< CollectionBarrier > collection_barrier_
 
int ignore_local_gc_requests_depth_ = 0
 
int gc_callbacks_depth_ = 0
 
bool deserialization_complete_ = false
 
int max_regular_code_object_size_ = 0
 
bool inline_allocation_enabled_ = true
 
int pause_allocation_observers_depth_ = 0
 
bool force_oom_ = false
 
bool force_gc_on_next_allocation_ = false
 
bool delay_sweeper_tasks_for_testing_ = false
 
std::vector< HeapObjectAllocationTracker * > allocation_trackers_
 
bool is_finalization_registry_cleanup_task_posted_ = false
 
MarkingState marking_state_
 
NonAtomicMarkingState non_atomic_marking_state_
 
PretenuringHandler pretenuring_handler_
 
ResizeNewSpaceMode resize_new_space_mode_ = ResizeNewSpaceMode::kNone
 
std::unique_ptr< MemoryBalancer > mb_
 
std::atomic< double > load_start_time_ms_ {kLoadTimeNotLoading}
 
bool update_allocation_limits_after_loading_ = false
 
bool is_full_gc_during_loading_ = false
 
std::optional< const void * > selective_stack_scan_start_address_
 

Static Private Attributes

static const int kInitialEvalCacheSize = 64
 
static const int kRememberedUnmappedPages = 128
 
static const int kYoungSurvivalRateHighThreshold = 90
 
static const int kYoungSurvivalRateAllowedDeviation = 15
 
static const int kOldSurvivalRateLowThreshold = 10
 
static const int kMaxMarkCompactsInIdleRound = 7
 
static constexpr int kRetainMapEntrySize = 2
 
static const int kMaxLoadTimeMs = 7000
 
static const uintptr_t kMmapRegionMask = 0xFFFFFFFFu
 
static constexpr double kLoadTimeNotLoading = -1.0
 

Friends

class ActivateMemoryReducerTask
 
class AlwaysAllocateScope
 
class ArrayBufferCollector
 
class ArrayBufferSweeper
 
class ConservativePinningScope
 
class ConcurrentMarking
 
class ConservativeTracedHandlesMarkingVisitor
 
class CppHeap
 
class EmbedderStackStateScope
 
class EvacuateVisitorBase
 
class GCCallbacksScope
 
class GCTracer
 
class HeapAllocator
 
class HeapObjectIterator
 
class HeapVerifier
 
class IgnoreLocalGCRequests
 
class IncrementalMarking
 
class IncrementalMarkingJob
 
class LargeObjectSpace
 
class LocalHeap
 
class MarkingBarrier
 
class OldLargeObjectSpace
 
template<typename ConcreteVisitor >
class MarkingVisitorBase
 
class MarkCompactCollector
 
class MemoryBalancer
 
class MinorGCJob
 
class MinorGCTaskObserver
 
class MinorMarkSweepCollector
 
class MinorMSIncrementalMarkingTaskObserver
 
class NewLargeObjectSpace
 
class NewSpace
 
class ObjectStatsCollector
 
class PageMetadata
 
class PagedNewSpaceAllocatorPolicy
 
class PagedSpaceAllocatorPolicy
 
class PagedSpaceBase
 
class PagedSpaceForNewSpace
 
class PauseAllocationObserversScope
 
class PretenuringHandler
 
class ReadOnlyRoots
 
class DisableConservativeStackScanningScopeForTesting
 
class Scavenger
 
class ScavengerCollector
 
class ScheduleMinorGCTaskObserver
 
class SemiSpaceNewSpace
 
class SemiSpaceNewSpaceAllocatorPolicy
 
class StressConcurrentAllocationObserver
 
class Space
 
class SpaceWithLinearArea
 
class Sweeper
 
class UnifiedHeapMarkingState
 
class heap::TestMemoryAllocatorScope
 
class Factory
 
class LocalFactory
 
template<typename IsolateT >
class Deserializer
 
class Isolate
 
class heap::HeapTester
 
class HeapInternalsBase
 

Detailed Description

Definition at line 212 of file heap.h.

Member Typedef Documentation

◆ ExternalStringTableUpdaterCallback

Definition at line 1711 of file heap.h.

◆ GetExternallyAllocatedMemoryInBytesCallback

◆ Reservation

using v8::internal::Heap::Reservation = std::vector<Chunk>

Definition at line 308 of file heap.h.

Member Enumeration Documentation

◆ AllocationRetryMode

Enumerator
kLightRetry 
kRetryOrFail 

Definition at line 2078 of file heap.h.

◆ HeapGrowingMode

Enumerator
kSlow 
kConservative 
kMinimal 
kDefault 

Definition at line 214 of file heap.h.

◆ HeapState

Enumerator
NOT_IN_GC 
SCAVENGE 
MARK_COMPACT 
MINOR_MARK_SWEEP 
TEAR_DOWN 

Definition at line 216 of file heap.h.

216  {
217  NOT_IN_GC,
218  SCAVENGE,
219  MARK_COMPACT,
220  MINOR_MARK_SWEEP,
221  TEAR_DOWN
222  };

◆ IncrementalMarkingLimit

Enumerator
kNoLimit 
kSoftLimit 
kHardLimit 
kFallbackForEmbedderLimit 

Definition at line 2027 of file heap.h.

2027  {
2028  kNoLimit,
2029  kSoftLimit,
2030  kHardLimit,
2031  kFallbackForEmbedderLimit
2032  };

◆ IterateRootsMode

Enumerator
kMainIsolate 
kClientIsolate 

Definition at line 1033 of file heap.h.

1033 { kMainIsolate, kClientIsolate };

◆ OldGenerationExpansionNotificationOrigin

Enumerator
kFromClientHeap 
kFromSameHeap 

Definition at line 422 of file heap.h.

422  {
423  // Specifies that the notification is coming from the client heap.
424  kFromClientHeap,
425  // Specifies that the notification is done within the same heap.
426  kFromSameHeap,
427  };

◆ ResizeNewSpaceMode

Enumerator
kShrink 
kGrow 
kNone 

Definition at line 1864 of file heap.h.

1864 { kShrink, kGrow, kNone };

◆ StackScanMode

Enumerator
kNone 
kFull 
kSelective 

Definition at line 377 of file heap.h.

◆ SweepingForcedFinalizationMode

Enumerator
kUnifiedHeap 
kV8Only 

Definition at line 1604 of file heap.h.

1604 { kUnifiedHeap, kV8Only };

◆ VerifyNoSlotsRecorded

Enumerator
kYes 
kNo 

Definition at line 1829 of file heap.h.

Constructor & Destructor Documentation

◆ Heap() [1/2]

v8::internal::Heap::Heap ( )
private

Definition at line 177 of file heap.cc.

178  : isolate_(isolate()),
180  safepoint_(std::make_unique<IsolateSafepoint>(this)),
183  isolate()->OwnsStringTables() ? AllocationType::kOld
187  pretenuring_handler_(this) {
188  // Ensure old_generation_size_ is a multiple of kPageSize.
190 
192 
194 
195  // Put a dummy entry in the remembered pages so we can find the list in the
196  // minidump even if there are no real unmapped pages.
198 }
ExternalStringTable external_string_table_
Definition: heap.h:2429
void set_native_contexts_list(Tagged< Object > object)
Definition: heap.h:500
NonAtomicMarkingState non_atomic_marking_state_
Definition: heap.h:2457
Isolate * isolate_
Definition: heap.h:2137
std::atomic< v8::MemoryPressureLevel > memory_pressure_level_
Definition: heap.h:2198
size_t max_old_generation_size() const
Definition: heap.h:1999
void RememberUnmappedPage(Address page, bool compacted)
Definition: heap.cc:6947
int max_regular_code_object_size_
Definition: heap.h:2441
const AllocationType allocation_type_for_in_place_internalizable_strings_
Definition: heap.h:2431
std::unique_ptr< IsolateSafepoint > safepoint_
Definition: heap.h:2422
MarkingState marking_state_
Definition: heap.h:2456
PretenuringHandler pretenuring_handler_
Definition: heap.h:2459
Isolate * isolate() const
Definition: heap-inl.h:61
static constexpr int MaxRegularCodeObjectSize()
static constexpr Tagged< Smi > zero()
Definition: smi.h:99
static constexpr Address kNullAddress
Definition: v8-internal.h:53
#define DCHECK_EQ(v1, v2)
Definition: logging.h:484

References DCHECK_EQ, v8::internal::kNone, v8::internal::kNullAddress, v8::internal::kOld, v8::internal::MutablePageMetadata::kPageSize, v8::internal::kSharedOld, max_old_generation_size(), max_regular_code_object_size_, v8::internal::MemoryChunkLayout::MaxRegularCodeObjectSize(), RememberUnmappedPage(), set_native_contexts_list(), and v8::internal::Smi::zero().

+ Here is the call graph for this function:

◆ ~Heap()

v8::internal::Heap::~Heap ( )
privatedefault

◆ Heap() [2/2]

v8::internal::Heap::Heap ( const Heap )
privatedelete

Member Function Documentation

◆ ActivateMemoryReducerIfNeeded()

void v8::internal::Heap::ActivateMemoryReducerIfNeeded ( )

Definition at line 3924 of file heap.cc.

3924  {
3925  if (memory_reducer_ == nullptr) return;
3926  // This method may be called from any thread. Post a task to run it on the
3927  // isolate's main thread to avoid synchronization.
3928  task_runner_->PostTask(std::make_unique<ActivateMemoryReducerTask>(this));
3929 }
std::unique_ptr< MemoryReducer > memory_reducer_
Definition: heap.h:2340
std::shared_ptr< v8::TaskRunner > task_runner_
Definition: heap.h:2350

References memory_reducer_, and task_runner_.

Referenced by v8::internal::Isolate::SetPriority().

+ Here is the caller graph for this function:

◆ ActivateMemoryReducerIfNeededOnMainThread()

void v8::internal::Heap::ActivateMemoryReducerIfNeededOnMainThread ( )
private

Definition at line 3931 of file heap.cc.

3931  {
3932  // Activate memory reducer when switching to background if
3933  // - there was no mark compact since the start.
3934  // - the committed memory can be potentially reduced.
3935  // 2 pages for the old, code, and map space + 1 page for new space.
3936  const int kMinCommittedMemory = 7 * PageMetadata::kPageSize;
3937  if (ms_count_ == 0 && CommittedMemory() > kMinCommittedMemory &&
3938  isolate()->is_backgrounded()) {
3939  memory_reducer_->NotifyPossibleGarbage();
3940  }
3941 }
unsigned int ms_count_
Definition: heap.h:2272
size_t CommittedMemory()
Definition: heap.cc:364

References CommittedMemory(), isolate(), v8::internal::MutablePageMetadata::kPageSize, memory_reducer_, and ms_count_.

+ Here is the call graph for this function:

◆ AddAllocationObserversToAllSpaces()

void v8::internal::Heap::AddAllocationObserversToAllSpaces ( AllocationObserver observer,
AllocationObserver new_space_observer 
)

Definition at line 1027 of file heap.cc.

1028  {
1029  DCHECK(observer && new_space_observer);
1031  allocator()->AddAllocationObserver(observer, new_space_observer);
1032 }
void AddAllocationObserver(AllocationObserver *observer, AllocationObserver *new_space_observer)
HeapAllocator * allocator()
Definition: heap.h:1696
V8_EXPORT_PRIVATE void FreeMainThreadLinearAllocationAreas()
Definition: heap.cc:3693
DCHECK(IsNull(value)||IsNativeContext(value)||value==Smi::uninitialized_deserialization_value())

References v8::internal::HeapAllocator::AddAllocationObserver(), allocator(), v8::internal::DCHECK(), and FreeMainThreadLinearAllocationAreas().

Referenced by NotifyDeserializationComplete(), and v8::internal::SamplingHeapProfiler::SamplingHeapProfiler().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AddGCEpilogueCallback()

void v8::internal::Heap::AddGCEpilogueCallback ( v8::Isolate::GCCallbackWithData  callback,
GCType  gc_type_filter,
void *  data 
)

Definition at line 6446 of file heap.cc.

6447  {
6449  callback, reinterpret_cast<v8::Isolate*>(isolate()), gc_type, data);
6450 }
Isolate represents an isolated instance of the V8 engine.
Definition: v8-isolate.h:285
void Add(CallbackType callback, v8::Isolate *isolate, GCType gc_type, void *data)
Definition: gc-callbacks.h:24
GCCallbacks gc_epilogue_callbacks_
Definition: heap.h:2307

References v8::internal::GCCallbacks::Add(), gc_epilogue_callbacks_, and isolate().

Referenced by v8::Isolate::AddGCEpilogueCallback(), v8::internal::wasm::WasmEngine::AddIsolate(), and SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AddGCPrologueCallback()

void v8::internal::Heap::AddGCPrologueCallback ( v8::Isolate::GCCallbackWithData  callback,
GCType  gc_type_filter,
void *  data 
)

Definition at line 6435 of file heap.cc.

6436  {
6438  callback, reinterpret_cast<v8::Isolate*>(isolate()), gc_type, data);
6439 }
GCCallbacks gc_prologue_callbacks_
Definition: heap.h:2306

References v8::internal::GCCallbacks::Add(), gc_prologue_callbacks_, and isolate().

Referenced by v8::Isolate::AddGCPrologueCallback(), and SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AddHeapObjectAllocationTracker()

void v8::internal::Heap::AddHeapObjectAllocationTracker ( HeapObjectAllocationTracker tracker)

Definition at line 920 of file heap.cc.

921  {
922  if (allocation_trackers_.empty() && v8_flags.inline_new) {
924  }
925  allocation_trackers_.push_back(tracker);
926  if (allocation_trackers_.size() == 1) {
928  }
929 }
std::vector< HeapObjectAllocationTracker * > allocation_trackers_
Definition: heap.h:2452
V8_EXPORT_PRIVATE void DisableInlineAllocation()
Definition: heap.cc:5738
void UpdateLogObjectRelocation()
Definition: isolate.cc:4467
V8_EXPORT_PRIVATE FlagValues v8_flags

References allocation_trackers_, DisableInlineAllocation(), isolate_, v8::internal::Isolate::UpdateLogObjectRelocation(), and v8::internal::v8_flags.

Referenced by v8::internal::Heap::AllocationTrackerForDebugging::AllocationTrackerForDebugging(), v8::internal::HeapProfiler::StartHeapObjectsTracking(), and v8::internal::Debug::StartSideEffectCheckMode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AddNearHeapLimitCallback()

void v8::internal::Heap::AddNearHeapLimitCallback ( v8::NearHeapLimitCallback  callback,
void *  data 
)

Definition at line 4360 of file heap.cc.

4361  {
4362  const size_t kMaxCallbacks = 100;
4363  CHECK_LT(near_heap_limit_callbacks_.size(), kMaxCallbacks);
4364  for (auto callback_data : near_heap_limit_callbacks_) {
4365  CHECK_NE(callback_data.first, callback);
4366  }
4367  near_heap_limit_callbacks_.push_back(std::make_pair(callback, data));
4368 }
std::vector< std::pair< v8::NearHeapLimitCallback, void * > > near_heap_limit_callbacks_
Definition: heap.h:2201
#define CHECK_LT(lhs, rhs)
#define CHECK_NE(lhs, rhs)

References CHECK_LT, CHECK_NE, and near_heap_limit_callbacks_.

Referenced by v8::Isolate::AddNearHeapLimitCallback().

+ Here is the caller graph for this function:

◆ AddRetainedMaps()

void v8::internal::Heap::AddRetainedMaps ( DirectHandle< NativeContext context,
GlobalHandleVector< Map maps 
)

Definition at line 6521 of file heap.cc.

6522  {
6523  Handle<WeakArrayList> array(Cast<WeakArrayList>(context->retained_maps()),
6524  isolate());
6525  int new_maps_size = static_cast<int>(maps.size()) * kRetainMapEntrySize;
6526  if (array->length() + new_maps_size > array->capacity()) {
6527  CompactRetainedMaps(*array);
6528  }
6529  int cur_length = array->length();
6530  array =
6531  WeakArrayList::EnsureSpace(isolate(), array, cur_length + new_maps_size);
6532  if (*array != context->retained_maps()) {
6533  context->set_retained_maps(*array);
6534  }
6535 
6536  {
6538  Tagged<WeakArrayList> raw_array = *array;
6539  for (DirectHandle<Map> map : maps) {
6541 
6542  if (map->is_in_retained_map_list()) {
6543  continue;
6544  }
6545 
6546  raw_array->Set(cur_length, MakeWeak(*map));
6547  raw_array->Set(cur_length + 1,
6548  Smi::FromInt(v8_flags.retain_maps_for_n_gc));
6549  cur_length += kRetainMapEntrySize;
6550  raw_array->set_length(cur_length);
6551 
6552  map->set_is_in_retained_map_list(true);
6553  }
6554  }
6555 }
static bool InAnySharedSpace(Tagged< HeapObject > object)
void CompactRetainedMaps(Tagged< WeakArrayList > retained_maps)
Definition: heap.cc:6557
static constexpr int kRetainMapEntrySize
Definition: heap.h:1880
static constexpr Tagged< Smi > FromInt(int value)
Definition: smi.h:38
static Handle< WeakArrayList > EnsureSpace(Isolate *isolate, Handle< WeakArrayList > array, int length, AllocationType allocation=AllocationType::kYoung)
Definition: fixed-array.cc:272
size_t const DisallowGarbageCollection & no_gc
Tagged< MaybeWeak< T > > MakeWeak(Tagged< T > value)
Definition: tagged.h:893
PerThreadAssertScopeDebugOnly< false, SAFEPOINTS_ASSERT, HEAP_ALLOCATION_ASSERT > DisallowGarbageCollection
Definition: assert-scope.h:241

References CompactRetainedMaps(), v8::internal::DCHECK(), v8::internal::WeakArrayList::EnsureSpace(), v8::internal::Smi::FromInt(), v8::internal::HeapLayout::InAnySharedSpace(), isolate(), kRetainMapEntrySize, v8::internal::MakeWeak(), v8::internal::anonymous_namespace{json-stringifier.cc}::no_gc, v8::internal::GlobalHandleVector< T >::size(), and v8::internal::v8_flags.

Referenced by v8::internal::OptimizedCompilationJob::RegisterWeakObjectsInOptimizedCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AddToRingBuffer()

void v8::internal::Heap::AddToRingBuffer ( const char *  string)
private

Definition at line 5298 of file heap.cc.

5298  {
5299  size_t first_part =
5300  std::min(strlen(string), kTraceRingBufferSize - ring_buffer_end_);
5301  memcpy(trace_ring_buffer_ + ring_buffer_end_, string, first_part);
5302  ring_buffer_end_ += first_part;
5303  if (first_part < strlen(string)) {
5304  ring_buffer_full_ = true;
5305  size_t second_part = strlen(string) - first_part;
5306  memcpy(trace_ring_buffer_, string + first_part, second_part);
5307  ring_buffer_end_ = second_part;
5308  }
5309 }
bool ring_buffer_full_
Definition: heap.h:2408
char trace_ring_buffer_[kTraceRingBufferSize]
Definition: heap.h:2403
static const int kTraceRingBufferSize
Definition: heap.h:322
size_t ring_buffer_end_
Definition: heap.h:2409

References kTraceRingBufferSize, ring_buffer_end_, ring_buffer_full_, and trace_ring_buffer_.

◆ AlignWithFillerBackground()

Tagged< HeapObject > v8::internal::Heap::AlignWithFillerBackground ( Tagged< HeapObject object,
int  object_size,
int  allocation_size,
AllocationAlignment  alignment 
)

Definition at line 3170 of file heap.cc.

3172  {
3173  const int filler_size = allocation_size - object_size;
3174  DCHECK_LT(0, filler_size);
3175  const int pre_filler = GetFillToAlign(object.address(), alignment);
3176  if (pre_filler) {
3177  object = PrecedeWithFillerBackground(object, pre_filler);
3178  }
3179  DCHECK_LE(0, filler_size - pre_filler);
3180  const int post_filler = filler_size - pre_filler;
3181  if (post_filler) {
3183  object.address() + object_size, post_filler));
3184  }
3185  return object;
3186 }
V8_EXPORT_PRIVATE Tagged< HeapObject > PrecedeWithFillerBackground(Tagged< HeapObject > object, int filler_size)
Definition: heap.cc:3163
void CreateFillerObjectAtBackground(const WritableFreeSpace &free_space)
Definition: heap.cc:3339
static V8_EXPORT_PRIVATE int GetFillToAlign(Address address, AllocationAlignment alignment)
Definition: heap.cc:3143
static WritableFreeSpace ForNonExecutableMemory(base::Address addr, size_t size)
#define DCHECK_LE(v1, v2)
Definition: logging.h:489
#define DCHECK_LT(v1, v2)
Definition: logging.h:488

References CreateFillerObjectAtBackground(), DCHECK_LE, DCHECK_LT, v8::internal::WritableFreeSpace::ForNonExecutableMemory(), GetFillToAlign(), and PrecedeWithFillerBackground().

+ Here is the call graph for this function:

◆ Allocate()

AllocationResult v8::internal::Heap::Allocate ( DirectHandle< Map map,
AllocationType  allocation 
)
private

Definition at line 327 of file setup-heap-internal.cc.

328  {
329  DCHECK(map->instance_type() != MAP_TYPE);
330  int size = map->instance_size();
332  AllocationResult allocation = AllocateRaw(size, allocation_type);
333  if (!allocation.To(&result)) return allocation;
334  // New space objects are allocated white.
335  WriteBarrierMode write_barrier_mode =
336  allocation_type == AllocationType::kYoung ? SKIP_WRITE_BARRIER
338  result->set_map_after_allocation(isolate(), *map, write_barrier_mode);
340 }
static AllocationResult FromObject(Tagged< HeapObject > heap_object)
V8_WARN_UNUSED_RESULT AllocationResult AllocateRaw(int size_in_bytes, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned)
Definition: heap-inl.h:198
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enables Turboshaft s StaticAssert and CheckTurboshaftTypeOf operations Wasm code into JS functions via the JS to Wasm wrappers are still inlined in TurboFan For controlling whether to at see turbo inline js wasm calls enable Turboshaft s loop unrolling enable an additional Turboshaft phase that performs optimizations based on type information enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps trace Turboshaft s if else to switch reducer invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the preconfigured old space size(in Mbytes)") DEFINE_INT(random_gc_interval
WriteBarrierMode
Definition: objects.h:51
@ SKIP_WRITE_BARRIER
Definition: objects.h:52
@ UPDATE_WRITE_BARRIER
Definition: objects.h:55
kInterpreterTrampolineOffset Tagged< HeapObject >

References AllocateRaw(), v8::internal::DCHECK(), v8::internal::AllocationResult::FromObject(), isolate(), v8::internal::kYoung, v8::base::internal::result, size(), v8::internal::SKIP_WRITE_BARRIER, v8::internal::AllocationResult::To(), and v8::internal::UPDATE_WRITE_BARRIER.

Referenced by CreateEarlyReadOnlyMapsAndObjects(), and CreateReadOnlyObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocatedExternalMemorySinceMarkCompact()

uint64_t v8::internal::Heap::AllocatedExternalMemorySinceMarkCompact ( ) const

Definition at line 5446 of file heap.cc.

5446  {
5448 }
uint64_t AllocatedSinceMarkCompact() const
Definition: heap.h:267
ExternalMemoryAccounting external_memory_
Definition: heap.h:2133

References v8::internal::Heap::ExternalMemoryAccounting::AllocatedSinceMarkCompact(), and external_memory_.

Referenced by AllocationLimitOvershotByLargeMargin().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateExternalBackingStore()

void * v8::internal::Heap::AllocateExternalBackingStore ( const std::function< void *(size_t)> &  allocate,
size_t  byte_length 
)

Definition at line 3188 of file heap.cc.

3189  {
3190  size_t max = isolate()->array_buffer_allocator()->MaxAllocationSize();
3192  if (byte_length > max) {
3193  return nullptr;
3194  }
3195  if (!always_allocate() && new_space()) {
3196  size_t new_space_backing_store_bytes =
3198  if ((!incremental_marking()->IsMajorMarking()) &&
3199  new_space_backing_store_bytes >= 2 * DefaultMaxSemiSpaceSize() &&
3200  new_space_backing_store_bytes >= byte_length) {
3201  // Performing a young generation GC amortizes over the allocated backing
3202  // store bytes and may free enough external bytes for this allocation.
3205  }
3206  }
3207  void* result = allocate(byte_length);
3208  if (result) return result;
3209  if (!always_allocate()) {
3210  for (int i = 0; i < 2; i++) {
3213  result = allocate(byte_length);
3214  if (result) return result;
3215  }
3218  }
3219  return allocate(byte_length);
3220 }
virtual size_t MaxAllocationSize() const
Returns a size_t that determines the largest ArrayBuffer that can be allocated.
V8_EXPORT_PRIVATE void CollectAllAvailableGarbage(GarbageCollectionReason gc_reason)
Definition: heap.cc:1351
V8_EXPORT_PRIVATE void CollectGarbage(AllocationSpace space, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags, PerformHeapLimitCheck check_heap_limit_reached=PerformHeapLimitCheck::kYes)
Definition: heap.cc:1595
bool always_allocate() const
Definition: heap.h:2017
NewSpace * new_space() const
Definition: heap.h:770
static V8_EXPORT_PRIVATE size_t DefaultMaxSemiSpaceSize()
Definition: heap.cc:5062
IncrementalMarking * incremental_marking() const
Definition: heap.h:1117
v8::ArrayBuffer::Allocator * array_buffer_allocator() const
Definition: isolate.h:1978
static constexpr size_t kMaxByteLength
size_t ExternalBackingStoreOverallBytes() const
Definition: new-spaces.h:203

References always_allocate(), v8::internal::Isolate::array_buffer_allocator(), CollectAllAvailableGarbage(), CollectGarbage(), v8::internal::DCHECK(), DefaultMaxSemiSpaceSize(), v8::internal::NewSpace::ExternalBackingStoreOverallBytes(), incremental_marking(), isolate(), v8::internal::kExternalMemoryPressure, v8::internal::JSArrayBuffer::kMaxByteLength, v8::ArrayBuffer::Allocator::MaxAllocationSize(), v8::internal::NEW_SPACE, new_space(), v8::internal::OLD_SPACE, and v8::base::internal::result.

Referenced by v8::internal::BackingStore::Allocate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateMap()

AllocationResult v8::internal::Heap::AllocateMap ( AllocationType  allocation_type,
InstanceType  instance_type,
int  instance_size,
ElementsKind  elements_kind = TERMINAL_FAST_ELEMENTS_KIND,
int  inobject_properties = 0 
)
private

Definition at line 256 of file setup-heap-internal.cc.

260  {
261  static_assert(LAST_JS_OBJECT_TYPE == LAST_TYPE);
263  DCHECK_EQ(allocation_type, IsMutableMap(instance_type, elements_kind)
266  AllocationResult allocation = AllocateRaw(Map::kSize, allocation_type);
267  if (!allocation.To(&result)) return allocation;
268 
269  ReadOnlyRoots roots(this);
270  result->set_map_after_allocation(isolate(), roots.meta_map(),
272  Tagged<Map> map = isolate()->factory()->InitializeMap(
273  Cast<Map>(result), instance_type, instance_size, elements_kind,
274  inobject_properties, roots);
275 
276  return AllocationResult::FromObject(map);
277 }
Tagged< Map > InitializeMap(Tagged< Map > map, InstanceType type, int instance_size, ElementsKind elements_kind, int inobject_properties, ReadOnlyRoots roots)
Definition: factory.cc:2349
friend class ReadOnlyRoots
Definition: heap.h:2523
v8::internal::Factory * factory()
Definition: isolate.h:1553

References AllocateRaw(), DCHECK_EQ, v8::internal::Isolate::factory(), v8::internal::AllocationResult::FromObject(), v8::internal::Factory::InitializeMap(), isolate(), v8::internal::kMap, v8::internal::kReadOnly, v8::internal::LAST_TYPE, v8::base::internal::result, v8::internal::SKIP_WRITE_BARRIER, and v8::internal::AllocationResult::To().

Referenced by CreateLateReadOnlyJSReceiverMaps(), and CreateLateReadOnlyNonJSReceiverMaps().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocatePartialMap()

AllocationResult v8::internal::Heap::AllocatePartialMap ( InstanceType  instance_type,
int  instance_size 
)
private

Definition at line 304 of file setup-heap-internal.cc.

305  {
307  AllocationResult allocation =
309  if (!allocation.To(&result)) return allocation;
310  // Cast<Map> cannot be used due to uninitialized map field.
311  Tagged<Map> map = UncheckedCast<Map>(result);
313  UncheckedCast<Map>(isolate()->root(RootIndex::kMetaMap)),
314  instance_type, instance_size);
315  return AllocationResult::FromObject(map);
316 }
void InitializePartialMap(Isolate *isolate, Tagged< Map > map, Tagged< Map > meta_map, InstanceType instance_type, int instance_size)

References AllocateRaw(), v8::internal::AllocationResult::FromObject(), v8::internal::anonymous_namespace{setup-heap-internal.cc}::InitializePartialMap(), isolate(), v8::internal::kReadOnly, v8::base::internal::result, and v8::internal::AllocationResult::To().

Referenced by CreateEarlyReadOnlyMapsAndObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateRaw()

AllocationResult v8::internal::Heap::AllocateRaw ( int  size_in_bytes,
AllocationType  allocation,
AllocationOrigin  origin = AllocationOrigin::kRuntime,
AllocationAlignment  alignment = kTaggedAligned 
)
inlineprivate

Definition at line 198 of file heap-inl.h.

200  {
201  return heap_allocator_->AllocateRaw(size_in_bytes, type, origin, alignment);
202 }
V8_WARN_UNUSED_RESULT AllocationResult AllocateRaw(int size_in_bytes, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned, AllocationHint hint=AllocationHint())
HeapAllocator * heap_allocator_
Definition: heap.h:2139
unsigned char * type
Definition: trace-event.h:457

References v8::internal::HeapAllocator::AllocateRaw(), heap_allocator_, and v8::internal::tracing::type.

Referenced by Allocate(), AllocateMap(), AllocatePartialMap(), CreateEarlyReadOnlyMapsAndObjects(), CreateImportantReadOnlyObjects(), and CreateReadOnlyObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateRawOrFail()

Address v8::internal::Heap::AllocateRawOrFail ( int  size,
AllocationType  allocation,
AllocationOrigin  origin = AllocationOrigin::kRuntime,
AllocationAlignment  alignment = kTaggedAligned 
)
inlineprivate

Definition at line 204 of file heap-inl.h.

206  {
207  return heap_allocator_
208  ->AllocateRawWith<HeapAllocator::kRetryOrFail>(size, allocation, origin,
209  alignment)
210  .address();
211 }
V8_WARN_UNUSED_RESULT Tagged< HeapObject > AllocateRawWith(int size, AllocationType allocation, AllocationOrigin origin=AllocationOrigin::kRuntime, AllocationAlignment alignment=kTaggedAligned, AllocationHint hint=AllocationHint())

References v8::internal::HeapAllocator::AllocateRawWith(), heap_allocator_, v8::internal::HeapAllocator::kRetryOrFail, and size().

+ Here is the call graph for this function:

◆ AllocateRawWith()

template<AllocationRetryMode mode>
V8_WARN_UNUSED_RESULT Tagged<HeapObject> v8::internal::Heap::AllocateRawWith ( int  size,
AllocationType  allocation,
AllocationOrigin  origin = AllocationOrigin::kRuntime,
AllocationAlignment  alignment = kTaggedAligned 
)
inlineprivate

◆ allocation_sites_list()

Tagged<UnionOf<Smi, Undefined, AllocationSiteWithWeakNext> > v8::internal::Heap::allocation_sites_list ( )
inline

Definition at line 514 of file heap.h.

514  {
515  return allocation_sites_list_;
516  }
Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext > > allocation_sites_list_
Definition: heap.h:2301

Referenced by v8::internal::ObjectStatsCollectorImpl::CollectGlobalStatistics(), DeoptMarkedAllocationSites(), v8::internal::AllocationSite::IsNested(), ProcessAllocationSites(), v8::internal::PretenuringHandler::ProcessPretenuringFeedback(), ProcessWeakListRoots(), and ResetAllAllocationSitesDependentCode().

+ Here is the caller graph for this function:

◆ allocation_sites_list_address()

Address v8::internal::Heap::allocation_sites_list_address ( )
inline

Definition at line 532 of file heap.h.

532  {
533  return reinterpret_cast<Address>(&allocation_sites_list_);
534  }
uintptr_t Address
Definition: memcopy.h:22

◆ allocation_type_for_in_place_internalizable_strings()

AllocationType v8::internal::Heap::allocation_type_for_in_place_internalizable_strings ( ) const
inlineprivate

Definition at line 2117 of file heap.h.

2117  {
2119  }

Referenced by v8::internal::Factory::AllocationTypeForInPlaceInternalizableString(), and v8::internal::LocalFactory::AllocationTypeForInPlaceInternalizableString().

+ Here is the caller graph for this function:

◆ AllocationLimitOvershotByLargeMargin()

bool v8::internal::Heap::AllocationLimitOvershotByLargeMargin ( ) const

Definition at line 5450 of file heap.cc.

5450  {
5451  // This guards against too eager finalization in small heaps.
5452  // The number is chosen based on v8.browsing_mobile on Nexus 7v2.
5453  constexpr size_t kMarginForSmallHeaps = 32u * MB;
5454 
5455  uint64_t size_now = OldGenerationConsumedBytes();
5456  if (!v8_flags.external_memory_accounted_in_global_limit) {
5458  }
5459  if (incremental_marking()->IsMajorMarking()) {
5460  // No interleaved GCs, so we count young gen as part of old gen.
5461  size_now += YoungGenerationConsumedBytes();
5462  }
5463 
5464  const size_t v8_overshoot = old_generation_allocation_limit() < size_now
5465  ? size_now - old_generation_allocation_limit()
5466  : 0;
5467  const size_t global_limit = global_allocation_limit();
5468  const size_t global_size = GlobalConsumedBytes();
5469  const size_t global_overshoot =
5470  global_limit < global_size ? global_size - global_limit : 0;
5471 
5472  // Bail out if the V8 and global sizes are still below their respective
5473  // limits.
5474  if (v8_overshoot == 0 && global_overshoot == 0) {
5475  return false;
5476  }
5477 
5478  // Overshoot margin is 50% of allocation limit or half-way to the max heap
5479  // with special handling of small heaps.
5480  const size_t v8_margin = std::min(
5481  std::max(old_generation_allocation_limit() / 2, kMarginForSmallHeaps),
5483  const size_t global_margin =
5484  std::min(std::max(global_limit / 2, kMarginForSmallHeaps),
5485  (max_global_memory_size_ - global_limit) / 2);
5486 
5487  return v8_overshoot >= v8_margin || global_overshoot >= global_margin;
5488 }
size_t max_global_memory_size_
Definition: heap.h:2156
size_t old_generation_allocation_limit() const
Definition: heap.h:1983
V8_EXPORT_PRIVATE size_t YoungGenerationConsumedBytes() const
Definition: heap.cc:5402
V8_EXPORT_PRIVATE size_t OldGenerationConsumedBytes() const
Definition: heap.cc:5381
size_t global_allocation_limit() const
Definition: heap.h:1987
V8_EXPORT_PRIVATE size_t GlobalConsumedBytes() const
Definition: heap.cc:5431
V8_EXPORT_PRIVATE uint64_t AllocatedExternalMemorySinceMarkCompact() const
Definition: heap.cc:5446
constexpr int MB
Definition: v8-internal.h:56

References AllocatedExternalMemorySinceMarkCompact(), global_allocation_limit(), GlobalConsumedBytes(), incremental_marking(), max_global_memory_size_, max_old_generation_size(), v8::internal::MB, old_generation_allocation_limit(), OldGenerationConsumedBytes(), v8::internal::v8_flags, and YoungGenerationConsumedBytes().

Referenced by RecomputeLimitsAfterLoadingIfNeeded(), ShouldExpandOldGenerationOnSlowAllocation(), ShouldExpandYoungGenerationOnSlowAllocation(), and ShouldOptimizeForLoadTime().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ allocator() [1/2]

HeapAllocator* v8::internal::Heap::allocator ( )
inline

Definition at line 1696 of file heap.h.

1696 { return heap_allocator_; }

Referenced by AddAllocationObserversToAllSpaces(), v8::internal::ScheduleMinorGCTaskObserver::AddToNewSpace(), v8::internal::Factory::allocator(), v8::internal::ScavengerCollector::CollectGarbage(), CreateReadOnlyObjects(), v8::internal::EvacuationAllocator::EvacuationAllocator(), v8::internal::MarkCompactCollector::Finish(), FreeMainThreadLinearAllocationAreas(), FreeSharedLinearAllocationAreasAndResetFreeLists(), IsPendingAllocationInternal(), MakeLinearAllocationAreasIterable(), v8::internal::MarkCompactCollector::MarkObjectsFromClientHeap(), MarkSharedLinearAllocationAreasBlack(), NewSpaceAllocationCounter(), NewSpaceLimit(), NewSpaceTop(), OldSpaceAllocationLimitAddress(), OldSpaceAllocationTopAddress(), v8::internal::IncrementalMarking::PauseBlackAllocation(), v8::internal::MarkCompactCollector::Prepare(), PublishMainThreadPendingAllocations(), RemoveAllocationObserversFromAllSpaces(), v8::internal::ScheduleMinorGCTaskObserver::RemoveFromNewSpace(), v8::internal::ConcurrentMarking::RunMajor(), v8::internal::ConcurrentMarking::RunMinorImpl(), SetUpSpaces(), v8::internal::IncrementalMarking::StartBlackAllocation(), v8::internal::IncrementalMarking::StartMarkingMajor(), StaticRootsEnsureAllocatedSize(), v8::internal::ScheduleMinorGCTaskObserver::Step(), v8::internal::IncrementalMarking::Stop(), TearDown(), UnmarkSharedLinearAllocationAreas(), and v8::internal::PauseAllocationObserversScope::~PauseAllocationObserversScope().

+ Here is the caller graph for this function:

◆ allocator() [2/2]

const HeapAllocator* v8::internal::Heap::allocator ( ) const
inline

Definition at line 1697 of file heap.h.

1697 { return heap_allocator_; }

◆ AllocatorLimitOnMaxOldGenerationSize()

size_t v8::internal::Heap::AllocatorLimitOnMaxOldGenerationSize ( )
static

Definition at line 278 of file heap.cc.

278  {
279 #ifdef V8_COMPRESS_POINTERS
280  // Isolate and the young generation are also allocated on the heap.
281  return kPtrComprCageReservationSize -
283  RoundUp(sizeof(Isolate), size_t{1} << kPageSizeBits);
284 #else
285  return std::numeric_limits<size_t>::max();
286 #endif
287 }
constexpr int kPageSizeBits
Definition: build_config.h:80
static V8_EXPORT_PRIVATE size_t YoungGenerationSizeFromSemiSpaceSize(size_t semi_space_size)
Definition: heap.cc:315
friend class Isolate
Definition: heap.h:2544
constexpr T RoundUp(T x, intptr_t m)
Definition: macros.h:387

References DefaultMaxSemiSpaceSize(), kPageSizeBits, RoundUp(), and YoungGenerationSizeFromSemiSpaceSize().

Referenced by ConfigureHeap(), InvokeNearHeapLimitCallback(), and MaxOldGenerationSizeFromPhysicalMemory().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllowedToBeMigrated()

bool v8::internal::Heap::AllowedToBeMigrated ( Tagged< Map >  map,
Tagged< HeapObject >  object,
AllocationSpace  dst 
)

Definition at line 7230 of file heap.cc.

7231  {
7232  // Object migration is governed by the following rules:
7233  //
7234  // 1) Objects in new-space can be migrated to the old space
7235  // that matches their target space or they stay in new-space.
7236  // 2) Objects in old-space stay in the same space when migrating.
7237  // 3) Fillers (two or more words) can migrate due to left-trimming of
7238  // fixed arrays in new-space or old space.
7239  // 4) Fillers (one word) can never migrate, they are skipped by
7240  // incremental marking explicitly to prevent invalid pattern.
7241  //
7242  // Since this function is used for debugging only, we do not place
7243  // asserts here, but check everything explicitly.
7244  if (map == ReadOnlyRoots(this).one_pointer_filler_map()) {
7245  return false;
7246  }
7247  InstanceType type = map->instance_type();
7248  MutablePageMetadata* chunk = MutablePageMetadata::FromHeapObject(object);
7249  AllocationSpace src = chunk->owner_identity();
7250  switch (src) {
7251  case NEW_SPACE:
7252  return dst == NEW_SPACE || dst == OLD_SPACE;
7253  case OLD_SPACE:
7254  return dst == OLD_SPACE;
7255  case CODE_SPACE:
7256  return dst == CODE_SPACE && type == INSTRUCTION_STREAM_TYPE;
7257  case SHARED_SPACE:
7258  return dst == SHARED_SPACE;
7259  case TRUSTED_SPACE:
7260  return dst == TRUSTED_SPACE;
7261  case SHARED_TRUSTED_SPACE:
7262  return dst == SHARED_TRUSTED_SPACE;
7263  case LO_SPACE:
7264  case CODE_LO_SPACE:
7265  case NEW_LO_SPACE:
7266  case SHARED_LO_SPACE:
7267  case TRUSTED_LO_SPACE:
7269  case RO_SPACE:
7270  return false;
7271  }
7272  UNREACHABLE();
7273 }
static MutablePageMetadata * FromHeapObject(Tagged< HeapObject > o)
@ TRUSTED_LO_SPACE
Definition: globals.h:1314
@ SHARED_TRUSTED_LO_SPACE
Definition: globals.h:1313
@ SHARED_TRUSTED_SPACE
Definition: globals.h:1308
@ SHARED_LO_SPACE
Definition: globals.h:1312

References v8::internal::CODE_LO_SPACE, v8::internal::CODE_SPACE, v8::internal::MutablePageMetadata::FromHeapObject(), v8::internal::LO_SPACE, v8::internal::NEW_LO_SPACE, v8::internal::NEW_SPACE, v8::internal::OLD_SPACE, v8::internal::MutablePageMetadata::owner_identity(), ReadOnlyRoots, v8::internal::RO_SPACE, v8::internal::SHARED_LO_SPACE, v8::internal::SHARED_SPACE, v8::internal::SHARED_TRUSTED_LO_SPACE, v8::internal::SHARED_TRUSTED_SPACE, v8::internal::TRUSTED_LO_SPACE, v8::internal::TRUSTED_SPACE, v8::internal::tracing::type, and v8::internal::UNREACHABLE().

Referenced by v8::internal::EvacuateVisitorBase::RawMigrateObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ always_allocate()

bool v8::internal::Heap::always_allocate ( ) const
inlineprivate

Definition at line 2017 of file heap.h.

2017 { return always_allocate_scope_count_ != 0; }
std::atomic< size_t > always_allocate_scope_count_
Definition: heap.h:2194

Referenced by v8::internal::IncrementalMarking::AdvanceAndFinalizeIfNecessary(), AllocateExternalBackingStore(), CollectGarbage(), CollectGarbageShared(), IncrementalMarkingLimitReached(), IsNewSpaceAllowedToGrowAboveTargetCapacity(), ShouldExpandOldGenerationOnSlowAllocation(), and ShouldExpandYoungGenerationOnSlowAllocation().

+ Here is the caller graph for this function:

◆ AppendArrayBufferExtension()

void v8::internal::Heap::AppendArrayBufferExtension ( ArrayBufferExtension *  extension)

Definition at line 4384 of file heap.cc.

4384  {
4385  // ArrayBufferSweeper is managing all counters and updating Heap counters.
4386  array_buffer_sweeper_->Append(extension);
4387 }
std::unique_ptr< ArrayBufferSweeper > array_buffer_sweeper_
Definition: heap.h:2334

References array_buffer_sweeper_.

Referenced by v8::internal::JSArrayBuffer::CreateExtension().

+ Here is the caller graph for this function:

◆ array_buffer_sweeper()

ArrayBufferSweeper* v8::internal::Heap::array_buffer_sweeper ( )
inline

◆ AsHeap()

Heap* v8::internal::Heap::AsHeap ( )
inline

Definition at line 887 of file heap.h.

887 { return this; }

◆ AttachCppHeap()

void v8::internal::Heap::AttachCppHeap ( v8::CppHeap *  cpp_heap)
private

Definition at line 6234 of file heap.cc.

6234  {
6235  // Only a single CppHeap can be attached at a time.
6237 
6240  cpp_heap_ = cpp_heap;
6241 }
static CppHeap * From(v8::CppHeap *heap)
Definition: cpp-heap.h:103
void AttachIsolate(Isolate *isolate)
Definition: cpp-heap.cc:601
v8::CppHeap * cpp_heap() const
Definition: heap.h:1167
std::unique_ptr< CppHeap > owning_cpp_heap_
Definition: heap.h:2365
v8::CppHeap * cpp_heap_
Definition: heap.h:2370
#define CHECK_IMPLIES(lhs, rhs)
#define CHECK(condition)
Definition: logging.h:124

References v8::internal::CppHeap::AttachIsolate(), CHECK, CHECK_IMPLIES, cpp_heap(), cpp_heap_, v8::internal::CppHeap::From(), incremental_marking(), isolate(), and owning_cpp_heap_.

Referenced by ConfigureHeap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AutomaticallyRestoreInitialHeapLimit()

void v8::internal::Heap::AutomaticallyRestoreInitialHeapLimit ( double  threshold_percent)

Definition at line 4400 of file heap.cc.

4400  {
4402  initial_max_old_generation_size_ * threshold_percent;
4403 }
size_t initial_max_old_generation_size_
Definition: heap.h:2158
size_t initial_max_old_generation_size_threshold_
Definition: heap.h:2159

References initial_max_old_generation_size_, and initial_max_old_generation_size_threshold_.

Referenced by v8::Isolate::AutomaticallyRestoreInitialHeapLimit().

+ Here is the caller graph for this function:

◆ Available()

size_t v8::internal::Heap::Available ( )

Definition at line 400 of file heap.cc.

400  {
401  if (!HasBeenSetUp()) return 0;
402 
403  size_t total = 0;
404 
405  for (SpaceIterator it(this); it.HasNext();) {
406  total += it.Next()->Available();
407  }
408 
409  total += memory_allocator()->Available();
410  return total;
411 }
bool HasBeenSetUp() const
Definition: heap.cc:449
MemoryAllocator * memory_allocator()
Definition: heap.h:846

References v8::internal::MemoryAllocator::Available(), HasBeenSetUp(), v8::internal::SpaceIterator::HasNext(), and memory_allocator().

Referenced by PrintShortHeapStatistics(), and SetUpSpaces().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ backing_store_bytes()

uint64_t v8::internal::Heap::backing_store_bytes ( ) const
inline

Definition at line 668 of file heap.h.

668  {
669  return backing_store_bytes_.load(std::memory_order_relaxed);
670  }
std::atomic< uint64_t > backing_store_bytes_
Definition: heap.h:2186

Referenced by v8::internal::ArrayBufferSweeper::Finish(), v8::Isolate::GetHeapStatistics(), and PrintShortHeapStatistics().

+ Here is the caller graph for this function:

◆ CallGCEpilogueCallbacks()

void v8::internal::Heap::CallGCEpilogueCallbacks ( GCType  gc_type,
GCCallbackFlags  flags,
GCTracer::Scope::ScopeId  scope_id 
)

Definition at line 2720 of file heap.cc.

2721  {
2722  if (gc_epilogue_callbacks_.IsEmpty()) return;
2723 
2724  GCCallbacksScope scope(this);
2725  if (scope.CheckReenter()) {
2726  RCS_SCOPE(isolate(), RuntimeCallCounterId::kGCEpilogueCallback);
2727  TRACE_GC(tracer(), scope_id);
2728  HandleScope handle_scope(isolate());
2730  }
2731 }
void Invoke(GCType gc_type, GCCallbackFlags gc_callback_flags) const
Definition: gc-callbacks.h:38
friend class GCCallbacksScope
Definition: heap.h:2493
GCTracer * tracer()
Definition: heap.h:843
#define TRACE_GC(tracer, scope_id)
Definition: gc-tracer.h:35
Flag flags[]
Definition: flags.cc:292
#define RCS_SCOPE(...)

References v8::internal::GCCallbacksScope::CheckReenter(), v8::internal::flags, gc_epilogue_callbacks_, v8::internal::GCCallbacks::Invoke(), v8::internal::GCCallbacks::IsEmpty(), isolate(), RCS_SCOPE, TRACE_GC, and tracer().

Referenced by CollectGarbage(), InvokeIncrementalMarkingEpilogueCallbacks(), and v8::internal::GlobalHandles::InvokeSecondPassPhantomCallbacks().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CallGCPrologueCallbacks()

void v8::internal::Heap::CallGCPrologueCallbacks ( GCType  gc_type,
GCCallbackFlags  flags,
GCTracer::Scope::ScopeId  scope_id 
)

Definition at line 2707 of file heap.cc.

2708  {
2709  if (gc_prologue_callbacks_.IsEmpty()) return;
2710 
2711  GCCallbacksScope scope(this);
2712  if (scope.CheckReenter()) {
2713  RCS_SCOPE(isolate(), RuntimeCallCounterId::kGCPrologueCallback);
2714  TRACE_GC(tracer(), scope_id);
2715  HandleScope handle_scope(isolate());
2717  }
2718 }

References v8::internal::GCCallbacksScope::CheckReenter(), v8::internal::flags, gc_prologue_callbacks_, v8::internal::GCCallbacks::Invoke(), v8::internal::GCCallbacks::IsEmpty(), isolate(), RCS_SCOPE, TRACE_GC, and tracer().

Referenced by CollectGarbage(), InvokeIncrementalMarkingPrologueCallbacks(), and v8::internal::GlobalHandles::InvokeSecondPassPhantomCallbacks().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CanExpandOldGeneration()

bool v8::internal::Heap::CanExpandOldGeneration ( size_t  size) const

Definition at line 413 of file heap.cc.

413  {
414  if (force_oom_ || force_gc_on_next_allocation_) return false;
415  if (OldGenerationCapacity() + size > max_old_generation_size()) return false;
416  // Stay below `MaxReserved()` such that it is more likely that committing the
417  // second semi space at the beginning of a GC succeeds.
418  return memory_allocator()->Size() + size <= MaxReserved();
419 }
V8_EXPORT_PRIVATE size_t MaxReserved() const
Definition: heap.cc:202
V8_EXPORT_PRIVATE size_t OldGenerationCapacity() const
Definition: heap.cc:333
bool force_gc_on_next_allocation_
Definition: heap.h:2449

References force_gc_on_next_allocation_, force_oom_, max_old_generation_size(), MaxReserved(), memory_allocator(), OldGenerationCapacity(), v8::internal::MemoryAllocator::Size(), and size().

Referenced by CanPromoteYoungAndExpandOldGeneration(), ReachedHeapLimit(), and ShouldOptimizeForMemoryUsage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CanMoveObjectStart()

bool v8::internal::Heap::CanMoveObjectStart ( Tagged< HeapObject >  object)

Definition at line 3384 of file heap.cc.

3384  {
3385  if (!v8_flags.move_object_start) {
3386  return false;
3387  }
3388 
3389  // Sampling heap profiler may have a reference to the object.
3390  if (heap_profiler()->is_sampling_allocations()) {
3391  return false;
3392  }
3393 
3394  if (IsLargeObject(object)) {
3395  return false;
3396  }
3397 
3398  // Compilation jobs may have references to the object.
3399  if (isolate()->concurrent_recompilation_enabled() &&
3400  isolate()->optimizing_compile_dispatcher()->HasJobs()) {
3401  return false;
3402  }
3403 
3404  // Concurrent marking does not support moving object starts without snapshot
3405  // protocol.
3406  //
3407  // TODO(v8:13726): This can be improved via concurrently reading the contents
3408  // in the marker at the cost of some complexity.
3409  if (incremental_marking()->IsMarking()) {
3410  return false;
3411  }
3412 
3413  // Concurrent sweeper does not support moving object starts. It assumes that
3414  // markbits (black regions) and object starts are matching up.
3415  if (!MutablePageMetadata::FromHeapObject(object)->SweepingDone()) {
3416  return false;
3417  }
3418 
3419  return true;
3420 }
HeapProfiler * heap_profiler() const
Definition: heap.h:408
static V8_EXPORT_PRIVATE bool IsLargeObject(Tagged< HeapObject > object)
Definition: heap.cc:3427

References v8::internal::MutablePageMetadata::FromHeapObject(), heap_profiler(), incremental_marking(), IsLargeObject(), isolate(), and v8::internal::v8_flags.

Referenced by LeftTrimFixedArray(), and v8::internal::anonymous_namespace{elements.cc}::FastElementsAccessor< Subclass, KindTraits >::RemoveElement().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CanPromoteYoungAndExpandOldGeneration()

bool v8::internal::Heap::CanPromoteYoungAndExpandOldGeneration ( size_t  size) const

Definition at line 426 of file heap.cc.

426  {
427  if (v8_flags.sticky_mark_bits) {
429  size_t new_space_capacity =
431  size_t new_lo_space_capacity = new_lo_space_ ? new_lo_space_->Size() : 0;
432  return CanExpandOldGeneration(size + new_space_capacity +
433  new_lo_space_capacity);
434  }
435  if (!new_space()) {
438  }
439  size_t new_space_capacity =
440  new_space()->Capacity() + new_lo_space()->Size() +
441  (v8_flags.minor_ms ? 0
444 
445  // Over-estimate the new space size using capacity to allow some slack.
446  return CanExpandOldGeneration(size + new_space_capacity);
447 }
V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size) const
Definition: heap.cc:413
NewLargeObjectSpace * new_lo_space_
Definition: heap.h:2213
NewLargeObjectSpace * new_lo_space() const
Definition: heap.h:780
StickySpace * sticky_space() const
Definition: heap-inl.h:434
SemiSpaceNewSpace * semi_space_new_space() const
Definition: heap-inl.h:430
size_t Size() const override
Definition: large-spaces.h:46
virtual size_t Capacity() const =0
size_t QuarantinedPageCount() const
Definition: new-spaces.h:427
size_t young_objects_size() const
Definition: paged-spaces.h:477
#define DCHECK_NULL(val)
Definition: logging.h:490

References CanExpandOldGeneration(), v8::internal::PagedSpaceBase::Capacity(), v8::internal::NewSpace::Capacity(), DCHECK_NULL, v8::internal::MutablePageMetadata::kPageSize, new_lo_space(), new_lo_space_, new_space(), v8::internal::SemiSpaceNewSpace::QuarantinedPageCount(), semi_space_new_space(), v8::internal::LargeObjectSpace::Size(), size(), sticky_space(), v8::internal::v8_flags, and v8::internal::StickySpace::young_objects_size().

Referenced by v8::internal::ScavengerCollector::NumberOfScavengeTasks(), SelectGarbageCollector(), and ShouldExpandYoungGenerationOnSlowAllocation().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CanReferenceHeapObject()

bool v8::internal::Heap::CanReferenceHeapObject ( Tagged< HeapObject >  obj)

Definition at line 4611 of file heap.cc.

4611  {
4612  MemoryChunk* chunk = MemoryChunk::FromHeapObject(obj);
4613  // Objects in read-only space are allowed to be used in any isolate.
4614  if (chunk->InReadOnlySpace()) return true;
4615  Heap* obj_heap = chunk->GetHeap();
4616  Heap* expected_heap = chunk->InWritableSharedSpace()
4618  : this;
4619  return obj_heap == expected_heap;
4620 }
Isolate * shared_space_isolate() const
Definition: isolate.h:2335
static MemoryChunk * FromHeapObject(Tagged< HeapObject > object)
Definition: memory-chunk.h:190

References v8::internal::MemoryChunk::FromHeapObject(), v8::internal::MemoryChunk::GetHeap(), v8::internal::Isolate::heap(), v8::internal::MemoryChunk::InReadOnlySpace(), v8::internal::MemoryChunk::InWritableSharedSpace(), isolate(), and v8::internal::Isolate::shared_space_isolate().

Referenced by v8::internal::HandleScope::CreateHandle().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CanSafepoint()

bool v8::internal::Heap::CanSafepoint ( ) const
inline

Definition at line 686 of file heap.h.

686 { return deserialization_complete(); }
bool deserialization_complete() const
Definition: heap.h:681

Referenced by v8::internal::HeapAllocator::AllocateRaw().

+ Here is the caller graph for this function:

◆ CanShortcutStringsDuringGC()

bool v8::internal::Heap::CanShortcutStringsDuringGC ( GarbageCollector  collector) const

Definition at line 535 of file heap.cc.

535  {
536  if (!v8_flags.shortcut_strings_with_stack && IsGCWithStack()) return false;
537 
538  switch (collector) {
540  if (!v8_flags.minor_ms_shortcut_strings) return false;
541 
542  DCHECK(!incremental_marking()->IsMajorMarking());
543 
544  // Minor MS cannot short cut strings during concurrent marking.
545  if (incremental_marking()->IsMinorMarking()) return false;
546 
547  // Minor MS uses static roots to check for strings to shortcut.
548  if (!V8_STATIC_ROOTS_BOOL) return false;
549 
550  break;
552  // Scavenger cannot short cut strings during incremental marking.
553  DCHECK(!incremental_marking()->IsMajorMarking());
554 
555  if (isolate()->has_shared_space() &&
556  !isolate()->is_shared_space_isolate() &&
557  isolate()
558  ->shared_space_isolate()
559  ->heap()
561  ->IsMarking()) {
562  DCHECK(isolate()
563  ->shared_space_isolate()
564  ->heap()
566  ->IsMajorMarking());
567  return false;
568  }
569  break;
570  default:
571  UNREACHABLE();
572  }
573 
574  return true;
575 }
bool IsGCWithStack() const
Definition: heap.cc:531
max size of the shared heap (in Mbytes); other heap size flags take precedence
#define V8_STATIC_ROOTS_BOOL
Definition: v8config.h:999

References v8::internal::DCHECK(), heap(), incremental_marking(), IsGCWithStack(), isolate(), v8::internal::MINOR_MARK_SWEEPER, v8::internal::SCAVENGER, v8::internal::UNREACHABLE(), v8::internal::v8_flags, and V8_STATIC_ROOTS_BOOL.

+ Here is the call graph for this function:

◆ Capacity()

size_t v8::internal::Heap::Capacity ( )

Definition at line 326 of file heap.cc.

326  {
327  if (!HasBeenSetUp()) {
328  return 0;
329  }
331 }
size_t NewSpaceCapacity() const
Definition: heap.cc:4043

References HasBeenSetUp(), NewSpaceCapacity(), and OldGenerationCapacity().

Referenced by SetUpSpaces().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CheckCollectionRequested()

void v8::internal::Heap::CheckCollectionRequested ( )

Definition at line 2239 of file heap.cc.

2239  {
2240  if (!CollectionRequested()) return;
2241 
2245 }
GCFlags current_gc_flags_
Definition: heap.h:2416
V8_EXPORT_PRIVATE void CollectAllGarbage(GCFlags gc_flags, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap.cc:1267
bool CollectionRequested()
Definition: heap.cc:2194
GCCallbackFlags current_gc_callback_flags_
Definition: heap.h:2419

References CollectAllGarbage(), CollectionRequested(), current_gc_callback_flags_, current_gc_flags_, and v8::internal::kBackgroundAllocationFailure.

Referenced by HandleGCRequest(), and v8::internal::BackgroundCollectionInterruptTask::RunInternal().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CheckHandleCount()

void v8::internal::Heap::CheckHandleCount ( )

Definition at line 6623 of file heap.cc.

6623  {
6624  CheckHandleCountVisitor v;
6626 }
V8_EXPORT_PRIVATE void Iterate(i::RootVisitor *v)
Definition: api.cc:12069
HandleScopeImplementer * handle_scope_implementer() const
Definition: isolate.h:1412

References v8::internal::Isolate::handle_scope_implementer(), isolate_, and v8::internal::HandleScopeImplementer::Iterate().

Referenced by GarbageCollectionEpilogueInSafepoint().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CheckHeapLimitReached()

void v8::internal::Heap::CheckHeapLimitReached ( )
private

Definition at line 1811 of file heap.cc.

1811  {
1812  if (ReachedHeapLimit()) {
1814  if (ReachedHeapLimit()) {
1815  if (v8_flags.heap_snapshot_on_oom) {
1817  }
1818  FatalProcessOutOfMemory("Reached heap limit");
1819  }
1820  }
1821 }
void WriteSnapshotToDiskAfterGC(HeapSnapshotMode snapshot_mode=HeapSnapshotMode::kRegular)
bool InvokeNearHeapLimitCallback()
Definition: heap.cc:4405
bool ReachedHeapLimit()
Definition: heap.cc:1809
V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(const char *location)
Definition: heap.cc:6584

References FatalProcessOutOfMemory(), heap_profiler(), InvokeNearHeapLimitCallback(), ReachedHeapLimit(), v8::internal::v8_flags, and v8::internal::HeapProfiler::WriteSnapshotToDiskAfterGC().

Referenced by v8::internal::HeapAllocator::AllocateRawWithLightRetrySlowPath(), CollectAllAvailableGarbage(), and CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CheckIneffectiveMarkCompact()

void v8::internal::Heap::CheckIneffectiveMarkCompact ( size_t  old_generation_size,
double  mutator_utilization 
)
private

Definition at line 3861 of file heap.cc.

3862  {
3863  if (!v8_flags.detect_ineffective_gcs_near_heap_limit) return;
3864  if (!IsIneffectiveMarkCompact(old_generation_size, mutator_utilization)) {
3866  return;
3867  }
3872  // The callback increased the heap limit.
3874  return;
3875  }
3876  if (v8_flags.heap_snapshot_on_oom) {
3878  }
3879  FatalProcessOutOfMemory("Ineffective mark-compacts near heap limit");
3880  }
3881 }
bool IsIneffectiveMarkCompact(size_t old_generation_size, double mutator_utilization)
Definition: heap.cc:3848
int consecutive_ineffective_mark_compacts_
Definition: heap.h:2279
static constexpr int kMaxConsecutiveIneffectiveMarkCompacts
Definition: heap.cc:3858

References consecutive_ineffective_mark_compacts_, FatalProcessOutOfMemory(), heap_profiler(), InvokeNearHeapLimitCallback(), IsIneffectiveMarkCompact(), v8::internal::anonymous_namespace{heap.cc}::kMaxConsecutiveIneffectiveMarkCompacts, v8::internal::v8_flags, and v8::internal::HeapProfiler::WriteSnapshotToDiskAfterGC().

Referenced by RecomputeLimits().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CheckMemoryPressure()

void v8::internal::Heap::CheckMemoryPressure ( )

Definition at line 4273 of file heap.cc.

4273  {
4274  if (HighMemoryPressure()) {
4276  }
4277  // Reset the memory pressure level to avoid recursive GCs triggered by
4278  // CheckMemoryPressure from AdjustAmountOfExternalMemory called by
4279  // the finalizers.
4280  MemoryPressureLevel memory_pressure_level = memory_pressure_level_.exchange(
4281  MemoryPressureLevel::kNone, std::memory_order_relaxed);
4282  if (memory_pressure_level == MemoryPressureLevel::kCritical) {
4283  TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure");
4285  } else if (memory_pressure_level == MemoryPressureLevel::kModerate) {
4286  if (v8_flags.incremental_marking && incremental_marking()->IsStopped()) {
4287  TRACE_EVENT0("devtools.timeline,v8", "V8.CheckMemoryPressure");
4290  }
4291  }
4292 }
void CollectGarbageOnMemoryPressure()
Definition: heap.cc:4294
V8_EXPORT_PRIVATE void StartIncrementalMarking(GCFlags gc_flags, GarbageCollectionReason gc_reason, GCCallbackFlags gc_callback_flags=GCCallbackFlags::kNoGCCallbackFlags, GarbageCollector collector=GarbageCollector::MARK_COMPACTOR)
Definition: heap.cc:1930
bool HighMemoryPressure()
Definition: heap.h:698
void FreeCachesOnMemoryPressure(Isolate *isolate)
Definition: heap.cc:1337
MemoryPressureLevel
Memory pressure level for the MemoryPressureNotification.
Definition: v8-isolate.h:175
#define TRACE_EVENT0(category_group, name)

References CollectGarbageOnMemoryPressure(), v8::internal::anonymous_namespace{heap.cc}::FreeCachesOnMemoryPressure(), HighMemoryPressure(), incremental_marking(), isolate(), v8::kCritical, v8::internal::kMemoryPressure, v8::kModerate, v8::kNone, v8::internal::kReduceMemoryFootprint, memory_pressure_level_, StartIncrementalMarking(), TRACE_EVENT0, and v8::internal::v8_flags.

Referenced by HandleGCRequest(), and MemoryPressureNotification().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ClearKeptObjects()

void v8::internal::Heap::ClearKeptObjects ( )

Definition at line 7147 of file heap.cc.

7147  {
7148  set_weak_refs_keep_during_job(ReadOnlyRoots(isolate()).undefined_value());
7149 }

References isolate(), and ReadOnlyRoots.

Referenced by v8::internal::Isolate::ClearKeptObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ClearRecordedSlotRange()

void v8::internal::Heap::ClearRecordedSlotRange ( Address  start,
Address  end 
)

Definition at line 6649 of file heap.cc.

6649  {
6650 #ifndef V8_DISABLE_WRITE_BARRIERS
6651  MemoryChunk* chunk = MemoryChunk::FromAddress(start);
6652  DCHECK(!chunk->IsLargePage());
6653 #if !V8_ENABLE_STICKY_MARK_BITS_BOOL
6654  if (!chunk->InYoungGeneration())
6655 #endif
6656  {
6657  PageMetadata* page = PageMetadata::cast(chunk->Metadata());
6658  // This method will be invoked on objects in shared space for
6659  // internalization and string forwarding during GC.
6660  DCHECK(page->owner_identity() == OLD_SPACE ||
6661  page->owner_identity() == TRUSTED_SPACE ||
6662  page->owner_identity() == SHARED_SPACE);
6663 
6664  if (!page->SweepingDone()) {
6668  page, start, end, SlotSet::KEEP_EMPTY_BUCKETS);
6671  }
6672  }
6673 #endif
6674 }
friend class PageMetadata
Definition: heap.h:2516
static MemoryChunk * FromAddress(Address addr)
Definition: memory-chunk.h:185
static PageMetadata * cast(MemoryChunkMetadata *metadata)
Definition: page-metadata.h:35
static void RemoveRange(MutablePageMetadata *chunk, Address start, Address end, SlotSet::EmptyBucketMode mode)
Node::Uses::const_iterator end(const Node::Uses &uses)
Definition: node.h:711

References v8::internal::PageMetadata::cast(), v8::internal::DCHECK(), v8::internal::compiler::end(), v8::internal::MemoryChunk::FromAddress(), v8::internal::MemoryChunk::InYoungGeneration(), v8::internal::MemoryChunk::IsLargePage(), heap::base::BasicSlotSet< kTaggedSize >::KEEP_EMPTY_BUCKETS, v8::internal::MemoryChunk::Metadata(), v8::internal::OLD_SPACE, v8::internal::MutablePageMetadata::owner_identity(), v8::internal::RememberedSet< type >::RemoveRange(), v8::internal::SHARED_SPACE, v8::internal::MutablePageMetadata::SweepingDone(), and v8::internal::TRUSTED_SPACE.

Referenced by CreateFillerObjectAtRaw().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ code_lo_space()

CodeLargeObjectSpace* v8::internal::Heap::code_lo_space ( ) const
inline

Definition at line 778 of file heap.h.

778 { return code_lo_space_; }
CodeLargeObjectSpace * code_lo_space_
Definition: heap.h:2212

Referenced by DeactivateMajorGCInProgressFlag(), v8::internal::anonymous_namespace{deoptimizer.cc}::DeoptimizableCodeIterator::Next(), v8::internal::OldGenerationMemoryChunkIterator::next(), OldGenerationSizeOfObjects(), v8::internal::IncrementalMarking::OldGenerationSizeOfObjects(), and Unmark().

+ Here is the caller graph for this function:

◆ code_range()

CodeRange* v8::internal::Heap::code_range ( )
inline

Definition at line 874 of file heap.h.

874  {
875 #ifdef V8_COMPRESS_POINTERS
876  return code_range_;
877 #else
878  return code_range_.get();
879 #endif
880  }
std::unique_ptr< CodeRange > code_range_
Definition: heap.h:2360

Referenced by v8::internal::Isolate::Init(), and v8::internal::CodeDataSourceIncrementalState::InternIsolate().

+ Here is the caller graph for this function:

◆ code_range_base()

Address v8::internal::Heap::code_range_base ( )
inline

Definition at line 185 of file heap-inl.h.

185  {
186  return code_range_ ? code_range_->base() : kNullAddress;
187 }

References code_range_, and v8::internal::kNullAddress.

Referenced by v8::internal::Isolate::AddCrashKeysForIsolateAndHeapPointers(), and v8::internal::AssemblerOptions::Default().

+ Here is the caller graph for this function:

◆ code_region()

const base::AddressRegion & v8::internal::Heap::code_region ( )
inline

Definition at line 180 of file heap-inl.h.

180  {
181  static constexpr base::AddressRegion kEmptyRegion;
182  return code_range_ ? code_range_->reservation()->region() : kEmptyRegion;
183 }

References code_range_.

Referenced by v8::internal::anonymous_namespace{setup-builtins-internal.cc}::BuiltinAssemblerOptions(), v8::Isolate::GetCodeRange(), v8::internal::TickSample::GetStackSample(), v8::internal::Isolate::InitializeIsShortBuiltinCallsEnabled(), and v8::internal::Isolate::MaybeRemapEmbeddedBuiltinsIntoCodeRange().

+ Here is the caller graph for this function:

◆ code_space()

◆ CollectAllAvailableGarbage()

void v8::internal::Heap::CollectAllAvailableGarbage ( GarbageCollectionReason  gc_reason)

Definition at line 1351 of file heap.cc.

1351  {
1352  // Min and max number of attempts for GC. The method will continue with more
1353  // GCs until the root set is stable.
1354  static constexpr int kMaxNumberOfAttempts = 7;
1355  static constexpr int kMinNumberOfAttempts = 2;
1356 
1357  // Returns the number of roots. We assume stack layout is stable but global
1358  // roots could change between GCs due to finalizers and weak callbacks.
1359  const auto num_roots = [this]() {
1360  size_t js_roots = 0;
1361  js_roots += isolate()->global_handles()->handles_count();
1362  js_roots += isolate()->eternal_handles()->handles_count();
1363  size_t cpp_roots = 0;
1364  if (auto* cpp_heap = CppHeap::From(cpp_heap_)) {
1365  cpp_roots += cpp_heap->GetStrongPersistentRegion().NodesInUse();
1366  cpp_roots +=
1367  cpp_heap->GetStrongCrossThreadPersistentRegion().NodesInUse();
1368  }
1369  return js_roots + cpp_roots;
1370  };
1371 
1372  if (gc_reason == GarbageCollectionReason::kLastResort) {
1374  }
1375  RCS_SCOPE(isolate(), RuntimeCallCounterId::kGC_Custom_AllAvailableGarbage);
1376 
1378 
1380 
1381  if (gc_reason == GarbageCollectionReason::kLastResort) {
1382  gc_flags |= GCFlag::kLastResort;
1383  }
1384 
1386  gc_flags |= GCFlag::kForced;
1387  }
1388 
1389  const auto perform_heap_limit_check = v8_flags.late_heap_limit_check
1392 
1393  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
1394  const size_t roots_before = num_roots();
1395  current_gc_flags_ = gc_flags;
1397  perform_heap_limit_check);
1399 
1400  // As long as we are at or above the heap limit, we need another GC to
1401  // survive CheckHeapLimitReached() after the loop.
1402  if (ReachedHeapLimit()) {
1403  continue;
1404  }
1405 
1406  if ((roots_before == num_roots()) &&
1407  ((attempt + 1) >= kMinNumberOfAttempts)) {
1408  break;
1409  }
1410  }
1411 
1413 
1415 
1416  if (v8_flags.trace_duplicate_threshold_kb) {
1417  std::map<int, std::vector<Tagged<HeapObject>>> objects_by_size;
1418  PagedSpaceIterator spaces(this);
1419  for (PagedSpace* space = spaces.Next(); space != nullptr;
1420  space = spaces.Next()) {
1421  PagedSpaceObjectIterator it(this, space);
1422  for (Tagged<HeapObject> obj = it.Next(); !obj.is_null();
1423  obj = it.Next()) {
1424  objects_by_size[obj->Size()].push_back(obj);
1425  }
1426  }
1427  {
1428  LargeObjectSpaceObjectIterator it(lo_space());
1429  for (Tagged<HeapObject> obj = it.Next(); !obj.is_null();
1430  obj = it.Next()) {
1431  objects_by_size[obj->Size()].push_back(obj);
1432  }
1433  }
1434  for (auto it = objects_by_size.rbegin(); it != objects_by_size.rend();
1435  ++it) {
1436  ReportDuplicates(it->first, &it->second);
1437  }
1438  }
1439 
1440  if (gc_reason == GarbageCollectionReason::kLastResort &&
1441  v8_flags.heap_snapshot_on_oom) {
1443  }
1444 }
void EagerlyFreeExternalMemoryAndWasmCode()
Definition: heap.cc:4351
Space * space(int idx) const
Definition: heap-inl.h:158
OldLargeObjectSpace * lo_space() const
Definition: heap.h:777
void CheckHeapLimitReached()
Definition: heap.cc:1811
GlobalHandles * global_handles() const
Definition: isolate.h:1431
EternalHandles * eternal_handles() const
Definition: isolate.h:1435
void ReportDuplicates(int size, std::vector< Tagged< HeapObject >> *objects)
Definition: heap.cc:1293
base::Flags< GCFlag, uint8_t > GCFlags
Definition: heap.h:209
@ kNoGCCallbackFlags
Definition: v8-callbacks.h:180

References CheckHeapLimitReached(), CollectGarbage(), cpp_heap(), cpp_heap_, current_gc_flags_, DCHECK_EQ, EagerlyFreeExternalMemoryAndWasmCode(), v8::internal::Isolate::eternal_handles(), v8::internal::anonymous_namespace{heap.cc}::FreeCachesOnMemoryPressure(), v8::internal::CppHeap::From(), v8::internal::Isolate::global_handles(), v8::internal::GlobalHandles::handles_count(), v8::internal::EternalHandles::handles_count(), heap_profiler(), InvokeNearHeapLimitCallback(), v8::internal::Tagged< HeapObject >::is_null(), isolate(), v8::internal::kForced, v8::internal::kLastResort, v8::internal::kLowMemoryNotification, v8::internal::kNo, v8::internal::kNoFlags, v8::kNoGCCallbackFlags, v8::internal::kReduceMemoryFootprint, v8::internal::kYes, lo_space(), v8::internal::PagedSpaceIterator::Next(), v8::internal::LargeObjectSpaceObjectIterator::Next(), v8::internal::PagedSpaceObjectIterator::Next(), v8::internal::OLD_SPACE, RCS_SCOPE, ReachedHeapLimit(), v8::internal::anonymous_namespace{heap.cc}::ReportDuplicates(), space(), v8::internal::v8_flags, and v8::internal::HeapProfiler::WriteSnapshotToDiskAfterGC().

Referenced by AllocateExternalBackingStore(), v8::internal::BackgroundMergeTask::BeginMergeInBackground(), v8::internal::HeapAllocator::CollectAllAvailableGarbage(), CollectGarbageWithRetry(), v8::internal::SnapshotCreatorImpl::CreateBlob(), v8::internal::HeapSnapshotGenerator::GenerateSnapshot(), v8::internal::HeapProfiler::GetDetachedJSWrapperObjects(), v8::Isolate::LowMemoryNotification(), v8::internal::HeapProfiler::QueryObjects(), v8::Shell::RunMain(), and v8::internal::Snapshot::SerializeDeserializeAndVerifyForTesting().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectAllGarbage()

void v8::internal::Heap::CollectAllGarbage ( GCFlags  gc_flags,
GarbageCollectionReason  gc_reason,
const GCCallbackFlags  gc_callback_flags = kNoGCCallbackFlags 
)

Definition at line 1267 of file heap.cc.

1269  {
1270  current_gc_flags_ = gc_flags;
1271  CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
1273 }

References CollectGarbage(), current_gc_flags_, DCHECK_EQ, v8::internal::kNoFlags, and v8::internal::OLD_SPACE.

Referenced by CheckCollectionRequested(), v8::internal::CppHeap::CollectGarbage(), CollectGarbageFromAnyThread(), CollectGarbageOnMemoryPressure(), FinalizeIncrementalMarkingAtomically(), v8::internal::SamplingHeapProfiler::GetAllocationProfile(), v8::internal::Debug::GetLoadedScripts(), HandleExternalMemoryInterrupt(), HandleGCRequest(), v8::internal::Deoptimizer::MaterializeHeapObjects(), PreciseCollectAllGarbage(), v8::internal::anonymous_namespace{objects.cc}::RehashObjectHashTableAndGCIfNeeded(), and v8::internal::Isolate::StackOverflow().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectCodeStatistics()

void v8::internal::Heap::CollectCodeStatistics ( )

Definition at line 4447 of file heap.cc.

4447  {
4448  TRACE_EVENT0("v8", "Heap::CollectCodeStatistics");
4449  SafepointScope safepoint_scope(isolate(),
4451  MakeHeapIterable();
4453  // We do not look for code in new space, or map space. If code
4454  // somehow ends up in those spaces, we would miss it here.
4460 }
static void ResetCodeAndMetadataStatistics(Isolate *isolate)
Definition: code-stats.cc:51
static void CollectCodeStatistics(PagedSpace *space, Isolate *isolate)
Definition: code-stats.cc:63
TrustedSpace * trusted_space_
Definition: heap.h:2216
V8_EXPORT_PRIVATE void MakeHeapIterable()
Definition: heap.cc:3661
OldSpace * old_space_
Definition: heap.h:2208
TrustedLargeObjectSpace * trusted_lo_space_
Definition: heap.h:2218
static constexpr GlobalSafepointForSharedSpaceIsolateTag kGlobalSafepointForSharedSpaceIsolate
Definition: safepoint.h:241

References code_lo_space_, code_space_, v8::internal::CodeStatistics::CollectCodeStatistics(), isolate(), v8::internal::kGlobalSafepointForSharedSpaceIsolate, MakeHeapIterable(), old_space_, v8::internal::CodeStatistics::ResetCodeAndMetadataStatistics(), TRACE_EVENT0, trusted_lo_space_, and trusted_space_.

Referenced by v8::Isolate::GetHeapCodeAndMetadataStatistics().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectGarbage()

void v8::internal::Heap::CollectGarbage ( AllocationSpace  space,
GarbageCollectionReason  gc_reason,
const GCCallbackFlags  gc_callback_flags = kNoGCCallbackFlags,
PerformHeapLimitCheck  check_heap_limit_reached = PerformHeapLimitCheck::kYes 
)

Definition at line 1595 of file heap.cc.

1598  {
1602 
1604  // During isolate initialization heap always grows. GC is only requested
1605  // if a new page allocation fails. In such a case we should crash with
1606  // an out-of-memory instead of performing GC because the prologue/epilogue
1607  // callbacks may see objects that are not yet deserialized.
1609  FatalProcessOutOfMemory("GC during deserialization");
1610  }
1611 
1612  // CollectGarbage consists of three parts:
1613  // 1. The prologue part which may execute callbacks. These callbacks may
1614  // allocate and trigger another garbage collection.
1615  // 2. The main garbage collection phase.
1616  // 3. The epilogue part which may execute callbacks. These callbacks may
1617  // allocate and trigger another garbage collection
1618 
1619  // Part 1: Invoke all callbacks which should happen before the actual garbage
1620  // collection is triggered. Note that these callbacks may trigger another
1621  // garbage collection since they may allocate.
1622 
1623  // JS execution is not allowed in any of the callbacks.
1624  DisallowJavascriptExecution no_js(isolate());
1625 
1626  // Some custom flushing (currently: FlushBytecodeFromSFI) can create
1627  // fresh TrustedPointerTableEntries during GC. These must not be affected
1628  // by an active TrustedPointerPublishingScope, so disable any such scope.
1629  DisableTrustedPointerPublishingScope no_trusted_pointer_tracking(isolate());
1630 
1631  DCHECK(AllowGarbageCollection::IsAllowed());
1632  // TODO(chromium:1523607): Ensure this for standalone cppgc as well.
1633  CHECK_IMPLIES(!v8_flags.allow_allocation_in_fast_api_call,
1634  !isolate()->InFastCCall());
1635 
1636  const char* collector_reason = nullptr;
1637  const GarbageCollector collector =
1638  SelectGarbageCollector(space, gc_reason, &collector_reason);
1640  DCHECK_IMPLIES(v8_flags.minor_ms && IsYoungGenerationCollector(collector),
1641  !ShouldReduceMemory());
1642 
1643  if (collector == GarbageCollector::MARK_COMPACTOR &&
1644  incremental_marking()->IsMinorMarking()) {
1645  const GCFlags gc_flags = current_gc_flags_;
1646  // Minor GCs should not be memory reducing.
1647  current_gc_flags_ &= ~GCFlag::kReduceMemoryFootprint;
1650  current_gc_flags_ = gc_flags;
1651  }
1652 
1653  const GCType gc_type = GetGCTypeFromGarbageCollector(collector);
1654 
1655  // Prologue callbacks. These callbacks may trigger GC themselves and thus
1656  // cannot be related exactly to garbage collection cycles.
1657  //
1658  // GCTracer scopes are managed by callees.
1659  InvokeExternalCallbacks(isolate(), [this, gc_callback_flags, gc_type]() {
1660  // Ensure that all pending phantom callbacks are invoked.
1662 
1663  // Prologue callbacks registered with Heap.
1664  CallGCPrologueCallbacks(gc_type, gc_callback_flags,
1665  GCTracer::Scope::HEAP_EXTERNAL_PROLOGUE);
1666  });
1667 
1668  // The main garbage collection phase.
1669  //
1670  // We need a stack marker at the top of all entry points to allow
1671  // deterministic passes over the stack. E.g., a verifier that should only
1672  // find a subset of references of the marker.
1673  //
1674  // TODO(chromium:1056170): Consider adding a component that keeps track
1675  // of relevant GC stack regions where interesting pointers can be found.
1676  stack().SetMarkerIfNeededAndCallback([this, collector, gc_reason,
1677  collector_reason, gc_callback_flags]() {
1678  DisallowGarbageCollection no_gc_during_gc;
1679 
1680  size_t committed_memory_before =
1683  : 0;
1684 
1686  VMState<GC> state(isolate());
1687  DevToolsTraceEventScope devtools_trace_event_scope(
1688  this, IsYoungGenerationCollector(collector) ? "MinorGC" : "MajorGC",
1689  ToString(gc_reason));
1690 
1691  GarbageCollectionPrologue(gc_reason, gc_callback_flags);
1692  {
1693  GCTracer::RecordGCPhasesInfo record_gc_phases_info(this, collector,
1694  gc_reason);
1695  std::optional<TimedHistogramScope> histogram_timer_scope;
1696  std::optional<OptionalTimedHistogramScope> histogram_timer_priority_scope;
1697  TRACE_EVENT0("v8", record_gc_phases_info.trace_event_name());
1698  if (record_gc_phases_info.type_timer()) {
1699  histogram_timer_scope.emplace(record_gc_phases_info.type_timer(),
1700  isolate_);
1701  }
1702  if (record_gc_phases_info.type_priority_timer()) {
1703  histogram_timer_priority_scope.emplace(
1704  record_gc_phases_info.type_priority_timer(), isolate_,
1706  }
1707 
1708  PerformGarbageCollection(collector, gc_reason, collector_reason);
1709 
1710  // Clear flags describing the current GC now that the current GC is
1711  // complete. Do this before GarbageCollectionEpilogue() since that could
1712  // trigger another unforced GC.
1713  is_current_gc_forced_ = false;
1715 
1716  if (collector == GarbageCollector::MARK_COMPACTOR ||
1717  collector == GarbageCollector::SCAVENGER) {
1718  tracer()->RecordGCPhasesHistograms(record_gc_phases_info.mode());
1719  }
1720  if ((collector == GarbageCollector::MARK_COMPACTOR ||
1721  collector == GarbageCollector::MINOR_MARK_SWEEPER) &&
1722  cpp_heap()) {
1724  }
1725  }
1726 
1727  GarbageCollectionEpilogue(collector);
1728  if (collector == GarbageCollector::MARK_COMPACTOR &&
1729  v8_flags.track_detached_contexts) {
1731  }
1732 
1733  if (collector == GarbageCollector::MARK_COMPACTOR) {
1734  if (memory_reducer_ != nullptr) {
1735  memory_reducer_->NotifyMarkCompact(committed_memory_before);
1736  }
1741  }
1742  }
1743 
1744  tracer()->StopAtomicPause();
1746  // Young generation cycles finish atomically. It is important that
1747  // StopObservablePause, and StopCycle are called in this
1748  // order; the latter may replace the current event with that of an
1749  // interrupted full cycle.
1750  if (IsYoungGenerationCollector(collector)) {
1752  } else {
1754  }
1755  RecomputeLimits(collector, base::TimeTicks::Now());
1756  });
1757 
1758  if ((collector == GarbageCollector::MARK_COMPACTOR) &&
1760  if (ShouldOptimizeForLoadTime()) {
1762  }
1764  }
1765 
1766  // Epilogue callbacks. These callbacks may trigger GC themselves and thus
1767  // cannot be related exactly to garbage collection cycles.
1768  //
1769  // GCTracer scopes are managed by callees.
1770  InvokeExternalCallbacks(isolate(), [this, gc_callback_flags, gc_type]() {
1771  // Epilogue callbacks registered with Heap.
1772  CallGCEpilogueCallbacks(gc_type, gc_callback_flags,
1773  GCTracer::Scope::HEAP_EXTERNAL_EPILOGUE);
1774 
1776  gc_callback_flags);
1777  });
1778 
1779  if (collector == GarbageCollector::MARK_COMPACTOR) {
1780  if ((gc_callback_flags &
1783  }
1784  if (v8_flags.heap_snapshot_on_gc > 0 &&
1785  static_cast<size_t>(v8_flags.heap_snapshot_on_gc) == ms_count_) {
1787  }
1788  } else {
1789  // Start incremental marking for the next cycle. We do this only for
1790  // minor GCs to avoid a loop where mark-compact causes another mark-compact.
1794  if (v8_flags.minor_ms &&
1797  }
1798  }
1799 
1800  if (perform_heap_limit_check == PerformHeapLimitCheck::kYes) {
1802  }
1803 
1804  if (collector == GarbageCollector::MARK_COMPACTOR) {
1806  }
1807 }
void SetMarkerIfNeededAndCallback(Callback callback)
Definition: stack.h:79
static Isolate * TryGetCurrent()
Returns the entered isolate for the current thread or NULL in case there is no current isolate.
Definition: api.cc:9924
static TimeTicks Now()
Definition: time.cc:736
void FinishAtomicSweepingIfRunning()
Definition: cpp-heap.cc:1246
void StopObservablePause(GarbageCollector collector, base::TimeTicks time)
Definition: gc-tracer.cc:339
void StopFullCycleIfFinished()
Definition: gc-tracer.cc:507
void StartObservablePause(base::TimeTicks time)
Definition: gc-tracer.cc:206
void RecordGCPhasesHistograms(RecordGCPhasesInfo::Mode mode)
Definition: gc-tracer.cc:1448
void StopYoungCycleIfFinished()
Definition: gc-tracer.cc:517
void PostGarbageCollectionProcessing(v8::GCCallbackFlags gc_callback_flags)
GarbageCollector SelectGarbageCollector(AllocationSpace space, GarbageCollectionReason gc_reason, const char **reason) const
Definition: heap.cc:473
ResizeNewSpaceMode resize_new_space_mode_
Definition: heap.h:2462
bool update_allocation_limits_after_loading_
Definition: heap.h:2472
void GarbageCollectionPrologue(GarbageCollectionReason gc_reason, const v8::GCCallbackFlags gc_callback_flags)
Definition: heap.cc:950
V8_EXPORT_PRIVATE void StartIncrementalMarkingIfAllocationLimitIsReached(LocalHeap *local_heap, GCFlags gc_flags, GCCallbackFlags gc_callback_flags=GCCallbackFlags::kNoGCCallbackFlags)
Definition: heap.cc:2037
static bool IsYoungGenerationCollector(GarbageCollector collector)
Definition: heap.h:361
LocalHeap * main_thread_local_heap()
Definition: heap.h:885
V8_EXPORT_PRIVATE size_t OldGenerationSizeOfObjects() const
Definition: heap.cc:5351
void RecomputeLimits(GarbageCollector collector, base::TimeTicks time)
Definition: heap.cc:2614
bool is_current_gc_forced_
Definition: heap.h:2424
GarbageCollector current_or_last_garbage_collector_
Definition: heap.h:2426
V8_EXPORT_PRIVATE bool ShouldOptimizeForLoadTime() const
Definition: heap.cc:5490
GCFlags GCFlagsForIncrementalMarking()
Definition: heap.h:1085
bool is_current_gc_for_heap_profiler_
Definition: heap.h:2425
void GarbageCollectionEpilogue(GarbageCollector collector)
Definition: heap.cc:1163
size_t CommittedOldGenerationMemory()
Definition: heap.cc:348
V8_EXPORT_PRIVATE void StartMinorMSConcurrentMarkingIfNeeded()
Definition: heap.cc:1237
V8_EXPORT_PRIVATE ::heap::base::Stack & stack()
Definition: heap.cc:6255
bool deserialization_complete_
Definition: heap.h:2439
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags, GCTracer::Scope::ScopeId scope_id)
Definition: heap.cc:2707
void PerformGarbageCollection(GarbageCollector collector, GarbageCollectionReason gc_reason, const char *collector_reason)
Definition: heap.cc:2299
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags, GCTracer::Scope::ScopeId scope_id)
Definition: heap.cc:2720
void SetOldGenerationAndGlobalMaximumSize(size_t max_old_generation_size)
Definition: heap.cc:1551
bool is_full_gc_during_loading_
Definition: heap.h:2476
bool ShouldReduceMemory() const
Definition: heap.h:1671
void CountUsage(v8::Isolate::UseCounterFeature feature)
Definition: isolate.cc:7153
void CheckDetachedContextsAfterGC()
Definition: isolate.cc:7206
void InvokeExternalCallbacks(Isolate *isolate, Callback callback)
Definition: heap.cc:1530
constexpr const char * ToString(DeoptimizeKind kind)
Definition: globals.h:872
static GCType GetGCTypeFromGarbageCollector(GarbageCollector collector)
Definition: heap.cc:1061
@ kGCCallbackScheduleIdleGarbageCollection
Definition: v8-callbacks.h:186
@ kGCCallbackFlagForced
Definition: v8-callbacks.h:182
@ kGCCallbackFlagCollectAllAvailableGarbage
Definition: v8-callbacks.h:184
GCType
Applications can register callback functions which will be called before and after certain garbage collection operations.
Definition: v8-callbacks.h:154
#define DCHECK_IMPLIES(v1, v2)
Definition: logging.h:492
#define V8_UNLIKELY(condition)
Definition: v8config.h:660

References always_allocate(), CallGCEpilogueCallbacks(), CallGCPrologueCallbacks(), CHECK, CHECK_IMPLIES, v8::internal::Isolate::CheckDetachedContextsAfterGC(), CheckHeapLimitReached(), CommittedOldGenerationMemory(), v8::internal::Isolate::CountUsage(), cpp_heap(), current_gc_flags_, current_or_last_garbage_collector_, v8::internal::DCHECK(), DCHECK_EQ, DCHECK_IMPLIES, deserialization_complete_, FatalProcessOutOfMemory(), v8::internal::CppHeap::FinishAtomicSweepingIfRunning(), v8::internal::CppHeap::From(), GarbageCollectionEpilogue(), GarbageCollectionPrologue(), GCFlagsForIncrementalMarking(), v8::internal::GetGCTypeFromGarbageCollector(), v8::internal::Isolate::global_handles(), heap_profiler(), incremental_marking(), initial_max_old_generation_size_, initial_max_old_generation_size_threshold_, v8::internal::anonymous_namespace{heap.cc}::InvokeExternalCallbacks(), v8::internal::GlobalHandles::InvokeSecondPassPhantomCallbacks(), is_current_gc_for_heap_profiler_, is_current_gc_forced_, is_full_gc_during_loading_, isolate(), isolate_, v8::internal::Isolate::IsOnCentralStack(), IsYoungGenerationCollector(), v8::internal::kFinalizeMinorMSForMajorGC, v8::Isolate::kForcedGC, v8::kGCCallbackFlagCollectAllAvailableGarbage, v8::kGCCallbackFlagForced, v8::kGCCallbackScheduleIdleGarbageCollection, v8::internal::kNoFlags, kNone, v8::internal::kYes, main_thread_local_heap(), v8::internal::MARK_COMPACTOR, max_old_generation_size(), memory_reducer_, v8::internal::MINOR_MARK_SWEEPER, v8::internal::GCTracer::RecordGCPhasesInfo::mode(), ms_count_, v8::internal::NEW_SPACE, v8::base::TimeTicks::Now(), OldGenerationSizeOfObjects(), PerformGarbageCollection(), v8::internal::GlobalHandles::PostGarbageCollectionProcessing(), RecomputeLimits(), v8::internal::GCTracer::RecordGCPhasesHistograms(), resize_new_space_mode_, v8::internal::SCAVENGER, SelectGarbageCollector(), heap::base::Stack::SetMarkerIfNeededAndCallback(), SetOldGenerationAndGlobalMaximumSize(), ShouldOptimizeForLoadTime(), 
ShouldReduceMemory(), space(), stack(), StartIncrementalMarkingIfAllocationLimitIsReached(), StartMinorMSConcurrentMarkingIfNeeded(), v8::internal::GCTracer::StartObservablePause(), v8::internal::GCTracer::StopAtomicPause(), v8::internal::GCTracer::StopFullCycleIfFinished(), v8::internal::GCTracer::StopObservablePause(), v8::internal::GCTracer::StopYoungCycleIfFinished(), v8::internal::TAKE_TIME, v8::internal::ToString(), TRACE_EVENT0, v8::internal::GCTracer::RecordGCPhasesInfo::trace_event_name(), tracer(), v8::Isolate::TryGetCurrent(), v8::internal::GCTracer::RecordGCPhasesInfo::type_priority_timer(), v8::internal::GCTracer::RecordGCPhasesInfo::type_timer(), update_allocation_limits_after_loading_, v8::internal::v8_flags, V8_UNLIKELY, and v8::internal::HeapProfiler::WriteSnapshotToDiskAfterGC().

Referenced by AllocateExternalBackingStore(), CollectAllAvailableGarbage(), CollectAllGarbage(), v8::internal::HeapAllocator::CollectGarbage(), CollectGarbageWithRetry(), HandleGCRequest(), and StartTearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectGarbageForBackground()

void v8::internal::Heap::CollectGarbageForBackground ( LocalHeap local_heap)

Definition at line 2232 of file heap.cc.

2232  {
2233  CHECK(local_heap->is_main_thread());
2237 }
V8_EXPORT_PRIVATE void CollectGarbageWithRetry(AllocationSpace space, GCFlags gc_flags, GarbageCollectionReason gc_reason, const GCCallbackFlags gc_callback_flags)
Definition: heap.cc:2198

References CHECK, CollectGarbageWithRetry(), current_gc_callback_flags_, current_gc_flags_, v8::internal::LocalHeap::is_main_thread(), v8::internal::kBackgroundAllocationFailure, and v8::internal::OLD_SPACE.

Referenced by v8::internal::LocalHeap::ParkSlowPath(), v8::internal::LocalHeap::SafepointSlowPath(), and v8::internal::LocalHeap::UnparkSlowPath().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectGarbageFromAnyThread()

bool v8::internal::Heap::CollectGarbageFromAnyThread ( LocalHeap *  local_heap,
GarbageCollectionReason  gc_reason = GarbageCollectionReason::kBackgroundAllocationFailure 
)

Definition at line 2498 of file heap.cc.

2499  {
2500  DCHECK(local_heap->IsRunning());
2501 
2502  if (isolate() == local_heap->heap()->isolate() &&
2503  local_heap->is_main_thread()) {
2505  return true;
2506  } else {
2507  if (!collection_barrier_->TryRequestGC()) return false;
2508 
2509  const LocalHeap::ThreadState old_state =
2511 
2512  if (old_state.IsRunning()) {
2513  const bool performed_gc =
2514  collection_barrier_->AwaitCollectionBackground(local_heap);
2515  return performed_gc;
2516  } else {
2517  DCHECK(old_state.IsParked());
2518  return false;
2519  }
2520  }
2521 }
std::unique_ptr< CollectionBarrier > collection_barrier_
Definition: heap.h:2433
AtomicThreadState state_
Definition: local-heap.h:385

References CollectAllGarbage(), collection_barrier_, current_gc_callback_flags_, current_gc_flags_, v8::internal::DCHECK(), v8::internal::LocalHeap::heap(), v8::internal::LocalHeap::is_main_thread(), isolate(), v8::internal::LocalHeap::ThreadState::IsParked(), v8::internal::LocalHeap::IsRunning(), v8::internal::LocalHeap::ThreadState::IsRunning(), main_thread_local_heap(), v8::internal::LocalHeap::AtomicThreadState::SetCollectionRequested(), and v8::internal::LocalHeap::state_.

Referenced by v8::internal::HeapAllocator::CollectAllAvailableGarbage(), v8::internal::HeapAllocator::CollectGarbage(), and CollectGarbageShared().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectGarbageOnMemoryPressure()

void v8::internal::Heap::CollectGarbageOnMemoryPressure ( )
private

Definition at line 4294 of file heap.cc.

4294  {
4295  const int kGarbageThresholdInBytes = 8 * MB;
4296  const double kGarbageThresholdAsFractionOfTotalMemory = 0.1;
4297  // This constant is the maximum response time in RAIL performance model.
4298  const double kMaxMemoryPressurePauseMs = 100;
4299 
4300  double start = MonotonicallyIncreasingTimeInMs();
4306 
4307  // Estimate how much memory we can free.
4308  int64_t potential_garbage =
4310  // If we can potentially free large amount of memory, then start GC right
4311  // away instead of waiting for memory reducer.
4312  if (potential_garbage >= kGarbageThresholdInBytes &&
4313  potential_garbage >=
4314  CommittedMemory() * kGarbageThresholdAsFractionOfTotalMemory) {
4315  // If we spent less than half of the time budget, then perform full GC
4316  // Otherwise, start incremental marking.
4317  if (end - start < kMaxMemoryPressurePauseMs / 2) {
4321  } else {
4322  if (v8_flags.incremental_marking && incremental_marking()->IsStopped()) {
4325  }
4326  }
4327  }
4328 }
uint64_t external_memory() const
Definition: heap-inl.h:67
V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs() const
Definition: heap.cc:4244
V8_EXPORT_PRIVATE size_t SizeOfObjects()
Definition: heap.cc:1008

References CollectAllGarbage(), CommittedMemory(), EagerlyFreeExternalMemoryAndWasmCode(), v8::internal::compiler::end(), external_memory(), incremental_marking(), v8::kGCCallbackFlagCollectAllAvailableGarbage, v8::internal::kMemoryPressure, v8::internal::kReduceMemoryFootprint, v8::internal::MB, MonotonicallyIncreasingTimeInMs(), SizeOfObjects(), StartIncrementalMarking(), and v8::internal::v8_flags.

Referenced by CheckMemoryPressure().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectGarbageShared()

bool v8::internal::Heap::CollectGarbageShared ( LocalHeap *  local_heap,
GarbageCollectionReason  gc_reason 
)

Definition at line 2484 of file heap.cc.

2485  {
2486  DCHECK(isolate()->has_shared_space());
2487 
2490  FatalProcessOutOfMemory("GC during deserialization");
2491  }
2492 
2493  Isolate* shared_space_isolate = isolate()->shared_space_isolate();
2494  return shared_space_isolate->heap()->CollectGarbageFromAnyThread(local_heap,
2495  gc_reason);
2496 }
V8_EXPORT_PRIVATE bool CollectGarbageFromAnyThread(LocalHeap *local_heap, GarbageCollectionReason gc_reason=GarbageCollectionReason::kBackgroundAllocationFailure)
Definition: heap.cc:2498

References always_allocate(), CHECK, CollectGarbageFromAnyThread(), v8::internal::DCHECK(), deserialization_complete_, FatalProcessOutOfMemory(), v8::internal::Isolate::heap(), isolate(), v8::internal::Isolate::shared_space_isolate(), and V8_UNLIKELY.

Referenced by v8::internal::HeapAllocator::CollectAllAvailableGarbage(), and v8::internal::HeapAllocator::CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectGarbageWithRetry()

void v8::internal::Heap::CollectGarbageWithRetry ( AllocationSpace  space,
GCFlags  gc_flags,
GarbageCollectionReason  gc_reason,
const GCCallbackFlags  gc_callback_flags 
)

Definition at line 2198 of file heap.cc.

2200  {
2201  const auto perform_heap_limit_check = v8_flags.late_heap_limit_check
2204 
2205  if (space == NEW_SPACE) {
2206  DCHECK_EQ(GCFlags(), gc_flags);
2207 
2208  for (int i = 0; i < 2; i++) {
2209  CollectGarbage(NEW_SPACE, gc_reason, gc_callback_flags,
2210  perform_heap_limit_check);
2211 
2212  if (!ReachedHeapLimit()) {
2213  return;
2214  }
2215  }
2216  }
2217 
2218  for (int i = 0; i < 2; i++) {
2219  current_gc_flags_ = gc_flags;
2220  CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags,
2221  perform_heap_limit_check);
2223 
2224  if (!ReachedHeapLimit()) {
2225  return;
2226  }
2227  }
2228 
2230 }

References CollectAllAvailableGarbage(), CollectGarbage(), current_gc_flags_, DCHECK_EQ, v8::internal::kLastResort, v8::internal::kNo, v8::internal::kYes, v8::internal::NEW_SPACE, v8::internal::OLD_SPACE, ReachedHeapLimit(), space(), and v8::internal::v8_flags.

Referenced by CollectGarbageForBackground().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectionRequested()

bool v8::internal::Heap::CollectionRequested ( )

Definition at line 2194 of file heap.cc.

2194  {
2195  return collection_barrier_->WasGCRequested();
2196 }

References collection_barrier_.

Referenced by CheckCollectionRequested(), HandleGCRequest(), and ShouldExpandOldGenerationOnSlowAllocation().

+ Here is the caller graph for this function:

◆ CommittedMemory()

size_t v8::internal::Heap::CommittedMemory ( )

Definition at line 364 of file heap.cc.

364  {
365  if (!HasBeenSetUp()) return 0;
366 
367  size_t new_space_committed = new_space_ ? new_space_->CommittedMemory() : 0;
368  size_t new_lo_space_committed = new_lo_space_ ? new_lo_space_->Size() : 0;
369 
370  return new_space_committed + new_lo_space_committed +
372 }
NewSpace * new_space_
Definition: heap.h:2207

References CommittedOldGenerationMemory(), HasBeenSetUp(), new_lo_space_, new_space_, and v8::internal::LargeObjectSpace::Size().

Referenced by ActivateMemoryReducerIfNeededOnMainThread(), CollectGarbageOnMemoryPressure(), GarbageCollectionEpilogue(), PrintShortHeapStatistics(), and UpdateMaximumCommitted().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CommittedMemoryExecutable()

size_t v8::internal::Heap::CommittedMemoryExecutable ( )

Definition at line 385 of file heap.cc.

385  {
386  if (!HasBeenSetUp()) return 0;
387 
388  return static_cast<size_t>(memory_allocator()->SizeExecutable());
389 }

References HasBeenSetUp(), memory_allocator(), and v8::internal::MemoryAllocator::SizeExecutable().

+ Here is the call graph for this function:

◆ CommittedOldGenerationMemory()

size_t v8::internal::Heap::CommittedOldGenerationMemory ( )

Definition at line 348 of file heap.cc.

348  {
349  if (!HasBeenSetUp()) return 0;
350 
351  PagedSpaceIterator spaces(this);
352  size_t total = 0;
353  for (PagedSpace* space = spaces.Next(); space != nullptr;
354  space = spaces.Next()) {
355  total += space->CommittedMemory();
356  }
357  if (shared_lo_space_) {
358  total += shared_lo_space_->Size();
359  }
360  return total + lo_space_->Size() + code_lo_space_->Size() +
362 }
virtual size_t CommittedMemory() const
Definition: base-space.h:36
SharedLargeObjectSpace * shared_lo_space_
Definition: heap.h:2214
OldLargeObjectSpace * lo_space_
Definition: heap.h:2211

References code_lo_space_, v8::internal::BaseSpace::CommittedMemory(), HasBeenSetUp(), lo_space_, v8::internal::PagedSpaceIterator::Next(), shared_lo_space_, v8::internal::LargeObjectSpace::Size(), space(), and trusted_lo_space_.

Referenced by CollectGarbage(), CommittedMemory(), HasHighFragmentation(), and v8::internal::MemoryReducer::NotifyMarkCompact().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CommittedPhysicalMemory()

size_t v8::internal::Heap::CommittedPhysicalMemory ( )

Definition at line 374 of file heap.cc.

374  {
375  if (!HasBeenSetUp()) return 0;
376 
377  size_t total = 0;
378  for (SpaceIterator it(this); it.HasNext();) {
379  total += it.Next()->CommittedPhysicalMemory();
380  }
381 
382  return total;
383 }

References HasBeenSetUp(), and v8::internal::SpaceIterator::HasNext().

+ Here is the call graph for this function:

◆ CompactRetainedMaps()

void v8::internal::Heap::CompactRetainedMaps ( Tagged< WeakArrayList >  retained_maps)
private

Definition at line 6557 of file heap.cc.

6557  {
6558  int length = retained_maps->length();
6559  int new_length = 0;
6560  // This loop compacts the array by removing cleared weak cells.
6561  for (int i = 0; i < length; i += kRetainMapEntrySize) {
6562  Tagged<MaybeObject> maybe_object = retained_maps->Get(i);
6563  if (maybe_object.IsCleared()) {
6564  continue;
6565  }
6566 
6567  DCHECK(maybe_object.IsWeak());
6568 
6569  Tagged<MaybeObject> age = retained_maps->Get(i + 1);
6570  DCHECK(IsSmi(age));
6571  if (i != new_length) {
6572  retained_maps->Set(new_length, maybe_object);
6573  retained_maps->Set(new_length + 1, age);
6574  }
6575  new_length += kRetainMapEntrySize;
6576  }
6577  Tagged<HeapObject> undefined = ReadOnlyRoots(this).undefined_value();
6578  for (int i = new_length; i < length; i++) {
6579  retained_maps->Set(i, undefined);
6580  }
6581  if (new_length != length) retained_maps->set_length(new_length);
6582 }
constexpr bool IsSmi(TaggedImpl< kRefType, StorageType > obj)
Definition: objects.h:666

References v8::internal::DCHECK(), v8::internal::TaggedImpl< kRefType, StorageType >::IsCleared(), v8::internal::IsSmi(), v8::internal::TaggedImpl< kRefType, StorageType >::IsWeak(), kRetainMapEntrySize, v8::internal::length, ReadOnlyRoots, and v8::internal::undefined.

Referenced by AddRetainedMaps().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CompactWeakArrayLists()

void v8::internal::Heap::CompactWeakArrayLists ( )

Definition at line 6488 of file heap.cc.

6488  {
6489  // Find known PrototypeUsers and compact them.
6490  std::vector<Handle<PrototypeInfo>> prototype_infos;
6491  {
6492  HeapObjectIterator iterator(this);
6493  for (Tagged<HeapObject> o = iterator.Next(); !o.is_null();
6494  o = iterator.Next()) {
6495  if (IsPrototypeInfo(*o)) {
6496  Tagged<PrototypeInfo> prototype_info = Cast<PrototypeInfo>(o);
6497  if (IsWeakArrayList(prototype_info->prototype_users())) {
6498  prototype_infos.emplace_back(handle(prototype_info, isolate()));
6499  }
6500  }
6501  }
6502  }
6503  for (auto& prototype_info : prototype_infos) {
6504  DirectHandle<WeakArrayList> array(
6505  Cast<WeakArrayList>(prototype_info->prototype_users()), isolate());
6506  DCHECK(InOldSpace(*array) ||
6507  *array == ReadOnlyRoots(this).empty_weak_array_list());
6508  Tagged<WeakArrayList> new_array = PrototypeUsers::Compact(
6511  prototype_info->set_prototype_users(new_array);
6512  }
6513 
6514  // Find known WeakArrayLists and compact them.
6515  Handle<WeakArrayList> scripts(script_list(), isolate());
6516  DCHECK(InOldSpace(*scripts));
6517  scripts = CompactWeakArrayList(this, scripts, AllocationType::kOld);
6518  set_script_list(*scripts);
6519 }
bool InOldSpace(Tagged< Object > object)
Definition: heap-inl.h:275
friend class HeapObjectIterator
Definition: heap.h:2496
static void PrototypeRegistryCompactionCallback(Tagged< HeapObject > value, int old_index, int new_index)
Definition: js-objects.cc:4903
static Tagged< WeakArrayList > Compact(DirectHandle< WeakArrayList > array, Heap *heap, CompactionCallback callback, AllocationType allocation=AllocationType::kYoung)
Definition: objects.cc:3872
Handle< WeakArrayList > CompactWeakArrayList(Heap *heap, Handle< WeakArrayList > array, AllocationType allocation)
Definition: heap.cc:6458
IndirectHandle< T > handle(Tagged< T > object, Isolate *isolate)
Definition: handles-inl.h:72

References v8::internal::PrototypeUsers::Compact(), v8::internal::anonymous_namespace{heap.cc}::CompactWeakArrayList(), v8::internal::DCHECK(), v8::internal::handle(), InOldSpace(), v8::internal::Tagged< HeapObject >::is_null(), isolate(), v8::internal::kOld, v8::internal::HeapObjectIterator::Next(), v8::internal::JSObject::PrototypeRegistryCompactionCallback(), and ReadOnlyRoots.

Referenced by v8::internal::SnapshotCreatorImpl::CreateBlob().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CompleteSweepingFull()

void v8::internal::Heap::CompleteSweepingFull ( )

Definition at line 2022 of file heap.cc.

2022  {
2024 
2027  !CppHeap::From(cpp_heap())->sweeper().IsSweepingInProgress());
2028  DCHECK(!tracer()->IsSweepingInProgress());
2029 }
bool sweeping_in_progress() const
Definition: heap.h:1591
V8_EXPORT_PRIVATE void EnsureSweepingCompleted(SweepingForcedFinalizationMode mode)
Definition: heap.cc:7474
Sweeper * sweeper()
Definition: heap.h:864

References cpp_heap(), v8::internal::DCHECK(), DCHECK_IMPLIES, EnsureSweepingCompleted(), v8::internal::CppHeap::From(), kUnifiedHeap, sweeper(), sweeping_in_progress(), and tracer().

Referenced by v8::internal::HeapSnapshotGenerator::GenerateSnapshot(), PerformGarbageCollection(), StartIncrementalMarking(), and StartTearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CompleteSweepingYoung()

void v8::internal::Heap::CompleteSweepingYoung ( )

Definition at line 2523 of file heap.cc.

2523  {
2524  DCHECK(!v8_flags.sticky_mark_bits);
2525 
2526  // If sweeping is in progress and there are no sweeper tasks running, finish
2527  // the sweeping here, to avoid having to pause and resume during the young
2528  // generation GC.
2530 
2532 
2533 #if defined(CPPGC_YOUNG_GENERATION)
2534  // Always complete sweeping if young generation is enabled.
2535  if (cpp_heap()) {
2536  if (auto* iheap = CppHeap::From(cpp_heap());
2537  iheap->generational_gc_supported())
2538  iheap->FinishSweepingIfRunning();
2539  }
2540 #endif // defined(CPPGC_YOUNG_GENERATION)
2541 }
bool generational_gc_supported() const
Definition: heap-base.h:218
void FinishSweepingIfOutOfWork()
Definition: heap.cc:7456
void EnsureYoungSweepingCompleted()
Definition: heap.cc:7555

References cpp_heap(), v8::internal::DCHECK(), EnsureYoungSweepingCompleted(), FinishSweepingIfOutOfWork(), v8::internal::CppHeap::From(), cppgc::internal::HeapBase::generational_gc_supported(), and v8::internal::v8_flags.

Referenced by PerformGarbageCollection(), and StartIncrementalMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ComputeMutatorUtilization()

double v8::internal::Heap::ComputeMutatorUtilization ( const char *  tag,
double  mutator_speed,
std::optional< double >  gc_speed 
)
private

Definition at line 3804 of file heap.cc.

3805  {
3806  double result = ComputeMutatorUtilizationImpl(mutator_speed, gc_speed);
3807  if (v8_flags.trace_mutator_utilization) {
3808  isolate()->PrintWithTimestamp(
3809  "%s mutator utilization = %.3f ("
3810  "mutator_speed=%.f, gc_speed=%.f)\n",
3811  tag, result, mutator_speed, gc_speed.value_or(0));
3812  }
3813  return result;
3814 }
double ComputeMutatorUtilizationImpl(double mutator_speed, std::optional< double > gc_speed)
Definition: heap.cc:3786

References v8::internal::anonymous_namespace{heap.cc}::ComputeMutatorUtilizationImpl(), isolate(), v8::base::internal::result, and v8::internal::v8_flags.

Referenced by HasLowEmbedderAllocationRate(), HasLowOldGenerationAllocationRate(), and HasLowYoungGenerationAllocationRate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ComputeNewAllocationLimits()

Heap::LimitsCompuatationResult v8::internal::Heap::ComputeNewAllocationLimits ( Heap *  heap)
staticprivate

Definition at line 2561 of file heap.cc.

2561  {
2562  DCHECK(!heap->using_initial_limit());
2563  heap->tracer()->RecordGCSizeCounters();
2564  const HeapGrowingMode mode = heap->CurrentHeapGrowingMode();
2565  std::optional<double> v8_gc_speed =
2566  heap->tracer()->OldGenerationSpeedInBytesPerMillisecond();
2567  double v8_mutator_speed =
2568  heap->tracer()->OldGenerationAllocationThroughputInBytesPerMillisecond();
2569  double v8_growing_factor = MemoryController<V8HeapTrait>::GrowingFactor(
2570  heap, heap->max_old_generation_size(), v8_gc_speed, v8_mutator_speed,
2571  mode);
2572  std::optional<double> embedder_gc_speed =
2573  heap->tracer()->EmbedderSpeedInBytesPerMillisecond();
2574  double embedder_speed =
2575  heap->tracer()->EmbedderAllocationThroughputInBytesPerMillisecond();
2576  double embedder_growing_factor =
2577  (embedder_gc_speed.has_value() && embedder_speed > 0)
2579  heap, heap->max_global_memory_size_, embedder_gc_speed,
2580  embedder_speed, mode)
2581  : 0;
2582 
2583  size_t new_space_capacity = heap->NewSpaceTargetCapacity();
2584 
2585  size_t new_old_generation_allocation_limit =
2587  heap, heap->OldGenerationConsumedBytesAtLastGC(),
2588  heap->OldGenerationConsumedBytesAtLastGC() * v8_growing_factor,
2589  heap->min_old_generation_size_, heap->max_old_generation_size(),
2590  new_space_capacity, mode);
2591 
2592  double global_growing_factor =
2593  std::max(v8_growing_factor, embedder_growing_factor);
2594  double external_growing_factor = std::min(
2595  global_growing_factor, GlobalMemoryTrait::kConservativeGrowingFactor);
2596  DCHECK_GT(global_growing_factor, 0);
2597  DCHECK_GT(external_growing_factor, 0);
2598  size_t new_global_allocation_limit =
2600  heap, heap->GlobalConsumedBytesAtLastGC(),
2601  (heap->OldGenerationConsumedBytesAtLastGC() +
2602  heap->embedder_size_at_last_gc_) *
2603  global_growing_factor +
2604  (v8_flags.external_memory_accounted_in_global_limit
2605  ? heap->external_memory_.low_since_mark_compact() *
2606  external_growing_factor
2607  : 0),
2608  heap->min_global_memory_size_, heap->max_global_memory_size_,
2609  new_space_capacity, mode);
2610 
2611  return {new_old_generation_allocation_limit, new_global_allocation_limit};
2612 }
static double GrowingFactor(Heap *heap, size_t max_heap_size, std::optional< double > gc_speed, double mutator_speed, Heap::HeapGrowingMode growing_mode)
static size_t BoundAllocationLimit(Heap *heap, size_t current_size, uint64_t limit, size_t min_size, size_t max_size, size_t new_space_capacity, Heap::HeapGrowingMode growing_mode)
too high values may cause the compiler to set high thresholds for inlining to as much as possible avoid inlined allocation of objects that cannot escape trace load stores from virtual maglev objects use TurboFan fast string builder analyze liveness of environment slots and zap dead values trace TurboFan load elimination emit data about basic block usage in builtins to this enable builtin reordering when run mksnapshot flag for emit warnings when applying builtin profile data verify register allocation in TurboFan randomly schedule instructions to stress dependency tracking enable store store elimination in TurboFan rewrite far to near simulate GC compiler thread race related to allow float parameters to be passed in simulator mode JS Wasm Run additional turbo_optimize_inlined_js_wasm_wrappers enables Turboshaft s StaticAssert and CheckTurboshaftTypeOf operations Wasm code into JS functions via the JS to Wasm wrappers are still inlined in TurboFan For controlling whether to at see turbo inline js wasm calls enable Turboshaft s loop unrolling enable an additional Turboshaft phase that performs optimizations based on type information enable Turbolev features that we want to ship in the not too far future trace individual Turboshaft reduction steps trace intermediate Turboshaft reduction steps trace Turboshaft s if else to switch reducer invocation count threshold for early optimization Enables optimizations which favor memory size over execution speed Enables sampling allocation profiler with X as a sample interval min size of a semi the new space consists of two semi spaces max size of the preconfigured old space Collect garbage after Collect garbage after keeps maps alive for< n > old space garbage collections print one detailed trace line in allocation gc speed threshold for starting incremental marking via a task in percent of available threshold for starting incremental marking immediately in percent of available Use a single schedule for determining a marking 
schedule between JS and C objects schedules the minor GC task with kUserVisible priority max worker number of concurrent for NumberOfWorkerThreads start background threads that allocate memory concurrent_array_buffer_sweeping use parallel threads to clear weak refs in the atomic pause trace progress of the incremental marking trace object counts and memory usage report a tick only when allocated zone memory changes by this amount TracingFlags::gc_stats TracingFlags::gc_stats track native contexts that are expected to be garbage collected verify heap pointers before and after GC memory reducer runs GC with ReduceMemoryFootprint flag Maximum number of memory reducer GCs scheduled Old gen GC speed is computed directly from gc tracer counters Perform compaction on full GCs based on V8 s default heuristics Perform compaction on every full GC Perform code space compaction when finalizing a full GC with stack Stress GC compaction to flush out bugs with moving objects flush of baseline code when it has not been executed recently Use time base code flushing instead of age Use a progress bar to scan large objects in increments when incremental marking is active force incremental marking for small heaps and run it more often Release pooled large pages after X seconds prints number of allocations and enables analysis mode for gc fuzz e g stress stress scavenge force scavenge at random points between and reclaim otherwise unreachable unmodified wrapper objects when possible discard the memory pool before invoking the GC on memory pressure or last resort GCs Delay before memory reducer start virtual randomize memory reservations by ignoring any hints passed when allocating pages use incremental marking for CppHeap cppheap_concurrent_marking c value for membalancer A special constant to balance between memory and space tradeoff The smaller the more memory it uses enable use of SSE4 instructions if available enable use of SAHF instruction if enable use of AVX VNNI instructions if 
available enable use of POPCNT instruction if available force all emitted branches to be in long mode(MIPS/PPC only)") DEFINE_BOOL(partial_constant_pool
Definition: platform.h:72
#define DCHECK_GT(v1, v2)
Definition: logging.h:486
static constexpr double kConservativeGrowingFactor

References v8::internal::MemoryController< Trait >::BoundAllocationLimit(), v8::internal::DCHECK(), DCHECK_GT, v8::internal::MemoryController< Trait >::GrowingFactor(), v8::internal::BaseControllerTrait::kConservativeGrowingFactor, mode(), and v8::internal::v8_flags.

Referenced by EnsureSweepingCompleted(), RecomputeLimits(), and RecomputeLimitsAfterLoadingIfNeeded().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ concurrent_marking()

◆ ConfigureHeap()

void v8::internal::Heap::ConfigureHeap ( const v8::ResourceConstraints &  constraints,
v8::CppHeap *  cpp_heap 
)

Definition at line 5109 of file heap.cc.

5110  {
5111  CHECK(!configured_);
5112  // Initialize max_semi_space_size_.
5113  {
5115  if (constraints.max_young_generation_size_in_bytes() > 0) {
5117  constraints.max_young_generation_size_in_bytes());
5118  }
5119  if (v8_flags.max_semi_space_size > 0) {
5121  static_cast<size_t>(v8_flags.max_semi_space_size) * MB;
5122  } else if (v8_flags.max_heap_size > 0) {
5123  size_t max_heap_size = static_cast<size_t>(v8_flags.max_heap_size) * MB;
5124  size_t young_generation_size, old_generation_size;
5125  if (v8_flags.max_old_space_size > 0) {
5126  old_generation_size =
5127  static_cast<size_t>(v8_flags.max_old_space_size) * MB;
5128  young_generation_size = max_heap_size > old_generation_size
5129  ? max_heap_size - old_generation_size
5130  : 0;
5131  } else {
5132  GenerationSizesFromHeapSize(max_heap_size, &young_generation_size,
5133  &old_generation_size);
5134  }
5136  SemiSpaceSizeFromYoungGenerationSize(young_generation_size);
5137  }
5138  if (v8_flags.stress_compaction) {
5139  // This will cause more frequent GCs when stressing.
5141  }
5142  if (!v8_flags.minor_ms) {
5143  // TODO(dinfuehr): Rounding to a power of 2 is technically no longer
5144  // needed but yields best performance on Pixel2.
5146  static_cast<size_t>(base::bits::RoundUpToPowerOfTwo64(
5147  static_cast<uint64_t>(max_semi_space_size_)));
5148  }
5152  RoundDown<PageMetadata::kPageSize>(max_semi_space_size_);
5153  }
5154 
5155  // Initialize max_old_generation_size_ and max_global_memory_.
5156  {
5157  size_t max_old_generation_size = 700ul * (kSystemPointerSize / 4) * MB;
5158  if (constraints.max_old_generation_size_in_bytes() > 0) {
5160  }
5161  if (v8_flags.max_old_space_size > 0) {
5163  static_cast<size_t>(v8_flags.max_old_space_size) * MB;
5164  } else if (v8_flags.max_heap_size > 0) {
5165  size_t max_heap_size = static_cast<size_t>(v8_flags.max_heap_size) * MB;
5166  size_t young_generation_size =
5168  max_old_generation_size = max_heap_size > young_generation_size
5169  ? max_heap_size - young_generation_size
5170  : 0;
5171  }
5177  RoundDown<PageMetadata::kPageSize>(max_old_generation_size);
5178 
5180  }
5181 
5182  CHECK_IMPLIES(
5183  v8_flags.max_heap_size > 0,
5184  v8_flags.max_semi_space_size == 0 || v8_flags.max_old_space_size == 0);
5185 
5186  // Initialize min_semispace_size_.
5187  {
5189  if (!v8_flags.optimize_for_size) {
5190  // Start with at least 1*MB semi-space on machines with a lot of memory.
5192  std::max(min_semi_space_size_, static_cast<size_t>(1 * MB));
5193  }
5195  if (v8_flags.min_semi_space_size > 0) {
5197  static_cast<size_t>(v8_flags.min_semi_space_size) * MB;
5198  }
5201  RoundDown<PageMetadata::kPageSize>(min_semi_space_size_);
5202  }
5203 
5204  // Initialize initial_semispace_size_.
5205  {
5207  if (constraints.initial_young_generation_size_in_bytes() > 0) {
5210  }
5211  if (v8_flags.initial_heap_size > 0) {
5212  size_t young_generation, old_generation;
5214  static_cast<size_t>(v8_flags.initial_heap_size) * MB,
5215  &young_generation, &old_generation);
5217  SemiSpaceSizeFromYoungGenerationSize(young_generation);
5218  }
5224  RoundDown<PageMetadata::kPageSize>(initial_semispace_size_);
5225  }
5226 
5229 
5230  if (v8_flags.lazy_new_space_shrinking) {
5232  }
5233 
5234  // Initialize initial_old_space_size_.
5235  std::optional<size_t> initial_old_generation_size =
5236  [&]() -> std::optional<size_t> {
5237  if (v8_flags.initial_old_space_size > 0) {
5238  return static_cast<size_t>(v8_flags.initial_old_space_size) * MB;
5239  }
5240  if (v8_flags.initial_heap_size > 0) {
5241  size_t initial_heap_size =
5242  static_cast<size_t>(v8_flags.initial_heap_size) * MB;
5243  size_t young_generation_size =
5245  return initial_heap_size > young_generation_size
5246  ? initial_heap_size - young_generation_size
5247  : 0;
5248  }
5249  return std::nullopt;
5250  }();
5252  if (initial_old_generation_size.has_value()) {
5254  initial_old_generation_size_ = *initial_old_generation_size;
5255  } else if (v8_flags.preconfigured_old_space_size > 0) {
5258  static_cast<size_t>(v8_flags.preconfigured_old_space_size) * MB;
5260  } else {
5262  if (constraints.initial_old_generation_size_in_bytes() > 0) {
5265  }
5266  }
5270  RoundDown<PageMetadata::kPageSize>(initial_old_generation_size_);
5272  // If the embedder pre-configures the initial old generation size,
5273  // then allow V8 to skip full GCs below that threshold.
5277  }
5280 
5281  // We rely on being able to allocate new arrays in paged spaces.
5283  (JSArray::kHeaderSize +
5285  ALIGN_TO_ALLOCATION_ALIGNMENT(sizeof(AllocationMemento))));
5286 
5288 
5289  heap_profiler_ = std::make_unique<HeapProfiler>(this);
5290  if (cpp_heap) {
5293  }
5294 
5295  configured_ = true;
5296 }
size_t max_old_generation_size_in_bytes() const
The maximum size of the old generation.
Definition: v8-isolate.h:124
size_t code_range_size_in_bytes() const
The amount of virtual memory reserved for generated code.
Definition: v8-isolate.h:114
size_t initial_young_generation_size_in_bytes() const
Definition: v8-isolate.h:150
size_t max_young_generation_size_in_bytes() const
The maximum size of the young generation, which consists of two semi-spaces and a large object space.
Definition: v8-isolate.h:136
size_t initial_old_generation_size_in_bytes() const
Definition: v8-isolate.h:143
static V8_EXPORT_PRIVATE size_t AllocatorLimitOnMaxOldGenerationSize()
Definition: heap.cc:278
size_t min_old_generation_size_
Definition: heap.h:2149
size_t initial_old_generation_size_
Definition: heap.h:2160
bool preconfigured_old_generation_size_
Definition: heap.h:2178
size_t max_semi_space_size_
Definition: heap.h:2144
static V8_EXPORT_PRIVATE size_t SemiSpaceSizeFromYoungGenerationSize(size_t young_generation_size)
Definition: heap.cc:320
std::unique_ptr< HeapProfiler > heap_profiler_
Definition: heap.h:2348
static V8_EXPORT_PRIVATE size_t DefaultInitialOldGenerationSize()
Definition: heap.cc:5047
static size_t DefaultMinSemiSpaceSize()
Definition: heap.cc:5057
void AttachCppHeap(v8::CppHeap *cpp_heap)
Definition: heap.cc:6234
bool initial_size_overwritten_
Definition: heap.h:2173
size_t min_global_memory_size_
Definition: heap.h:2155
size_t initial_semispace_size_
Definition: heap.h:2146
size_t min_semi_space_size_
Definition: heap.h:2145
static V8_EXPORT_PRIVATE size_t MinOldGenerationSize()
Definition: heap.cc:272
bool configured_
Definition: heap.h:2413
static V8_EXPORT_PRIVATE void GenerationSizesFromHeapSize(size_t heap_size, size_t *young_generation_size, size_t *old_generation_size)
Definition: heap.cc:244
void ResetOldGenerationAndGlobalAllocationLimit()
Definition: heap.cc:1586
size_t code_range_size_
Definition: heap.h:2143
static const int kInitialMaxFastElementArray
Definition: js-array.h:148
#define ALIGN_TO_ALLOCATION_ALIGNMENT(value)
Definition: globals.h:1811
constexpr V8_BASE_EXPORT uint64_t RoundUpToPowerOfTwo64(uint64_t value)
Definition: bits.h:235
size_t GlobalMemorySizeFromV8Size(size_t v8_size)
Definition: heap.cc:1543
constexpr int kMaxRegularHeapObjectSize
Definition: globals.h:671
constexpr int kSystemPointerSize
Definition: globals.h:411
#define DCHECK_GE(v1, v2)
Definition: logging.h:487

References ALIGN_TO_ALLOCATION_ALIGNMENT, AllocatorLimitOnMaxOldGenerationSize(), AttachCppHeap(), CHECK, CHECK_IMPLIES, code_range_size_, v8::ResourceConstraints::code_range_size_in_bytes(), configured_, cpp_heap(), v8::internal::DCHECK(), DCHECK_GE, DCHECK_LE, DefaultInitialOldGenerationSize(), DefaultMaxSemiSpaceSize(), DefaultMinSemiSpaceSize(), v8::internal::CppHeap::From(), GenerationSizesFromHeapSize(), v8::internal::anonymous_namespace{heap.cc}::GlobalMemorySizeFromV8Size(), heap_profiler_, initial_max_old_generation_size_, initial_old_generation_size_, v8::ResourceConstraints::initial_old_generation_size_in_bytes(), initial_semispace_size_, initial_size_overwritten_, v8::ResourceConstraints::initial_young_generation_size_in_bytes(), v8::internal::JSArray::kInitialMaxFastElementArray, v8::internal::kMaxRegularHeapObjectSize, v8::internal::kSystemPointerSize, max_old_generation_size(), v8::ResourceConstraints::max_old_generation_size_in_bytes(), max_semi_space_size_, v8::ResourceConstraints::max_young_generation_size_in_bytes(), v8::internal::MB, min_global_memory_size_, min_old_generation_size_, min_semi_space_size_, MinOldGenerationSize(), owning_cpp_heap_, preconfigured_old_generation_size_, ResetOldGenerationAndGlobalAllocationLimit(), v8::base::bits::RoundUpToPowerOfTwo64(), SemiSpaceSizeFromYoungGenerationSize(), SetOldGenerationAndGlobalMaximumSize(), v8::internal::TaggedArrayBase< FixedArray, TaggedArrayShape >::SizeFor(), v8::internal::v8_flags, and YoungGenerationSizeFromSemiSpaceSize().

Referenced by ConfigureHeapDefault(), v8::Isolate::Initialize(), and v8::internal::SnapshotCreatorImpl::SnapshotCreatorImpl().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ConfigureHeapDefault()

void v8::internal::Heap::ConfigureHeapDefault ( )

Definition at line 5320 of file heap.cc.

5320  {
5321  v8::ResourceConstraints constraints;
5322  ConfigureHeap(constraints, nullptr);
5323 }
A set of constraints that specifies the limits of the runtime's memory use.
Definition: v8-isolate.h:64
void ConfigureHeap(const v8::ResourceConstraints &constraints, v8::CppHeap *cpp_heap)
Definition: heap.cc:5109

References ConfigureHeap().

Referenced by SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ConservativeStackScanningModeForMajorGC()

StackScanMode v8::internal::Heap::ConservativeStackScanningModeForMajorGC ( ) const
inline

Definition at line 387 of file heap.h.

387  {
388  if (v8_flags.conservative_stack_scanning) {
389  return StackScanMode::kFull;
390  }
391  if (selective_stack_scan_start_address_.has_value()) {
393  }
394  return StackScanMode::kNone;
395  }
std::optional< const void * > selective_stack_scan_start_address_
Definition: heap.h:2480

References v8::internal::v8_flags.

Referenced by IterateConservativeStackRoots(), and v8::internal::MarkCompactCollector::StartCompaction().

+ Here is the caller graph for this function:

◆ ConservativeStackScanningModeForMinorGC()

StackScanMode v8::internal::Heap::ConservativeStackScanningModeForMinorGC ( ) const
inline

Definition at line 378 of file heap.h.

378  {
379  if (v8_flags.scavenger_conservative_object_pinning) {
380  return StackScanMode::kFull;
381  }
382  if (selective_stack_scan_start_address_.has_value()) {
384  }
385  return StackScanMode::kNone;
386  }

References v8::internal::v8_flags.

Referenced by v8::internal::ScavengerCollector::CollectGarbage(), and v8::internal::anonymous_namespace{scavenger.cc}::YoungGenerationConservativeStackVisitor::YoungGenerationConservativeStackVisitor().

+ Here is the caller graph for this function:

◆ Contains()

bool v8::internal::Heap::Contains ( Tagged< HeapObject value) const

Definition at line 4481 of file heap.cc.

4481  {
4483  return false;
4484  }
4485  if (memory_allocator()->IsOutsideAllocatedSpace(value.address())) {
4486  return false;
4487  }
4488 
4489  if (!HasBeenSetUp()) return false;
4490 
4491  return (new_space_ && new_space_->Contains(value)) ||
4502 }
SharedTrustedSpace * shared_trusted_space_
Definition: heap.h:2217
SharedTrustedLargeObjectSpace * shared_trusted_lo_space_
Definition: heap.h:2219
SharedSpace * shared_space_
Definition: heap.h:2210
bool Contains(Tagged< HeapObject > obj) const
bool Contains(Tagged< Object > o) const
bool Contains(Address a) const
static V8_EXPORT_PRIVATE bool Contains(Address address)
return value
Definition: map-inl.h:912

References code_lo_space_, code_space_, v8::internal::PagedSpaceBase::Contains(), v8::internal::ReadOnlyHeap::Contains(), v8::internal::LargeObjectSpace::Contains(), v8::internal::NewSpace::Contains(), HasBeenSetUp(), lo_space_, memory_allocator(), new_lo_space_, new_space_, old_space_, shared_lo_space_, shared_space_, shared_trusted_lo_space_, shared_trusted_space_, trusted_lo_space_, trusted_space_, and v8::internal::value.

Referenced by v8::internal::SnapshotCreatorImpl::AddContext(), v8::internal::SnapshotCreatorImpl::AddData(), v8::internal::MinorMarkSweepCollector::DrainMarkingWorklist(), v8::internal::MarkCompactCollector::MarkObject(), v8::internal::MarkCompactCollector::MarkRootObject(), and v8::internal::SnapshotCreatorImpl::SetDefaultContext().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ContainsCode()

bool v8::internal::Heap::ContainsCode ( Tagged< HeapObject value) const

Definition at line 4504 of file heap.cc.

4504  {
4505  // TODO(v8:11880): support external code space.
4506  if (memory_allocator()->IsOutsideAllocatedSpace(value.address(),
4507  EXECUTABLE)) {
4508  return false;
4509  }
4510  return HasBeenSetUp() &&
4512 }

References code_lo_space_, code_space_, v8::internal::PagedSpaceBase::Contains(), v8::internal::LargeObjectSpace::Contains(), v8::internal::EXECUTABLE, HasBeenSetUp(), memory_allocator(), and v8::internal::value.

+ Here is the call graph for this function:

◆ CopyBlock()

void v8::internal::Heap::CopyBlock ( Address  dst,
Address  src,
int  byte_size 
)
inlinestatic

Definition at line 292 of file heap-inl.h.

292  {
293  DCHECK(IsAligned(byte_size, kTaggedSize));
294  CopyTagged(dst, src, static_cast<size_t>(byte_size / kTaggedSize));
295 }
constexpr int kTaggedSize
Definition: globals.h:533
void CopyTagged(Address dst, const Address src, size_t num_tagged)
Definition: slots-inl.h:490
constexpr bool IsAligned(T value, U alignment)
Definition: macros.h:403

References v8::internal::CopyTagged(), v8::internal::DCHECK(), IsAligned(), and v8::internal::kTaggedSize.

Referenced by v8::internal::Factory::CopyFixedDoubleArray(), v8::internal::Factory::CopyJSObjectWithAllocationSite(), v8::internal::anonymous_namespace{read-only-promotion.cc}::ReadOnlyPromotionImpl::CopyToReadOnlyHeap(), v8::internal::Scavenger::MigrateObject(), and v8::internal::EvacuateVisitorBase::RawMigrateObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CopyRange() [1/2]

template<typename TSlot >
void v8::internal::Heap::CopyRange ( Tagged< HeapObject dst_object,
const TSlot  dst_slot,
const TSlot  src_slot,
int  len,
WriteBarrierMode  mode 
)

Definition at line 2164 of file heap.cc.

2165  {
2166  // Ensure ranges do not overlap.
2167  DCHECK(TSlot(dst_slot + len) <= src_slot || (src_slot + len) <= dst_slot);
2168 
2169  const auto atomic_callback = [](TSlot dst_slot, TSlot dst_end, TSlot src_slot,
2170  int len) {
2171  const AtomicSlot atomic_dst_end(dst_end);
2172  AtomicSlot dst(dst_slot);
2173  AtomicSlot src(src_slot);
2174  while (dst < atomic_dst_end) {
2175  *dst = *src;
2176  ++dst;
2177  ++src;
2178  }
2179  };
2180  const auto non_atomic_callback = [](TSlot dst_slot, TSlot src_slot, int len) {
2181  MemCopy(dst_slot.ToVoidPtr(), src_slot.ToVoidPtr(), len * kTaggedSize);
2182  };
2183  CopyOrMoveRangeImpl(this, dst_object, dst_slot, src_slot, len, mode,
2184  atomic_callback, non_atomic_callback);
2185 }
void CopyOrMoveRangeImpl(Heap *heap, Tagged< HeapObject > dst_object, const TSlot dst_slot, const TSlot src_slot, int len, WriteBarrierMode mode, AtomicOp atomic_op, NonAtomicOp non_atomic_op)
Definition: heap.cc:2081
void MemCopy(void *dest, const void *src, size_t size)
Definition: memcopy.h:124

References v8::internal::anonymous_namespace{heap.cc}::CopyOrMoveRangeImpl(), v8::internal::DCHECK(), v8::internal::kTaggedSize, v8::internal::MemCopy(), and mode().

+ Here is the call graph for this function:

◆ CopyRange() [2/2]

template<typename TSlot >
V8_EXPORT_PRIVATE void v8::internal::Heap::CopyRange ( Tagged< HeapObject dst_object,
TSlot  dst_slot,
TSlot  src_slot,
int  len,
WriteBarrierMode  mode 
)

◆ cpp_heap()

◆ CreateDefaultMeasureMemoryDelegate()

std::unique_ptr< v8::MeasureMemoryDelegate > v8::internal::Heap::CreateDefaultMeasureMemoryDelegate ( v8::Local< v8::Context context,
v8::Local< v8::Promise::Resolver promise,
v8::MeasureMemoryMode  mode 
)

Definition at line 4440 of file heap.cc.

4442  {
4444  reinterpret_cast<v8::Isolate*>(isolate_), context, promise, mode);
4445 }
static std::unique_ptr< v8::MeasureMemoryDelegate > DefaultDelegate(v8::Isolate *isolate, v8::Local< v8::Context > context, v8::Local< v8::Promise::Resolver > promise, v8::MeasureMemoryMode mode)

References v8::internal::MemoryMeasurement::DefaultDelegate(), isolate_, and mode().

Referenced by v8::MeasureMemoryDelegate::Default().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateEarlyReadOnlyMapsAndObjects()

bool v8::internal::Heap::CreateEarlyReadOnlyMapsAndObjects ( )
private

Definition at line 342 of file setup-heap-internal.cc.

342  {
343  // Setup maps and objects which are used often, or used in
344  // CreateImportantReadOnlyObjects.
345  ReadOnlyRoots roots(this);
346 
347  // First create the following, in the following order:
348  // - Undefined value
349  // - Null value
350  // - Empty string
351  // - False value
352  // - True value
353  // - /String maps
354  // \...
355  // - Symbol map
356  // - Meta-map
357  // - Undefined map
358  // - Null map
359  // - Boolean map
360  //
361  // This is so that:
362  // 1. The falsy values are the first in the space, allowing ToBoolean false
363  // checks to be a single less-than.
364  // 2. The true value is immediately after the falsy values, so that we can
365  // use a single compare's condition flags to check both falsy and true.
366  // 3. The string maps are all together, and are the first maps, allowing
367  // them to be checked with a single less-than if we know we have a map.
368  // 4. The symbol map is with the string maps, for similarly fast Name
369  // checks.
370 
371  Tagged<HeapObject> obj;
372  {
373  // We're a bit loose with raw pointers here for readability -- this is all
374  // guaranteed to be safe anyway since the allocations can't cause a GC, so
375  // disable gcmole in this range.
376  DisableGCMole no_gc_mole;
377 
378  // First, set up the roots to all point to the right offset in the
379  // allocation folded allocation.
380 #define ALLOCATE_AND_SET_ROOT(Type, name, Size) \
381  { \
382  AllocationResult alloc = AllocateRaw(Size, AllocationType::kReadOnly); \
383  if (!alloc.To(&obj)) return false; \
384  } \
385  Tagged<Type> name = UncheckedCast<Type>(obj); \
386  set_##name(name)
387 
388  ALLOCATE_AND_SET_ROOT(Undefined, undefined_value, sizeof(Undefined));
389  ALLOCATE_AND_SET_ROOT(Null, null_value, sizeof(Null));
390  ALLOCATE_AND_SET_ROOT(SeqOneByteString, empty_string,
392  ALLOCATE_AND_SET_ROOT(False, false_value, sizeof(False));
393  ALLOCATE_AND_SET_ROOT(True, true_value, sizeof(True));
394 
395  for (const StringTypeInit& entry : kStringTypeTable) {
396  {
397  AllocationResult alloc =
399  if (!alloc.To(&obj)) return false;
400  }
401  Tagged<Map> map = UncheckedCast<Map>(obj);
402  roots_table()[entry.index] = map.ptr();
403  }
404  ALLOCATE_AND_SET_ROOT(Map, symbol_map, Map::kSize);
405 
406  ALLOCATE_AND_SET_ROOT(Map, meta_map, Map::kSize);
407  // Keep HeapNumber and Oddball maps together for cheap NumberOrOddball
408  // checks.
409  ALLOCATE_AND_SET_ROOT(Map, undefined_map, Map::kSize);
410  ALLOCATE_AND_SET_ROOT(Map, null_map, Map::kSize);
411  // Keep HeapNumber and Boolean maps together for cheap NumberOrBoolean
412  // checks.
413  ALLOCATE_AND_SET_ROOT(Map, boolean_map, Map::kSize);
414  // Keep HeapNumber and BigInt maps together for cheaper numerics checks.
415  ALLOCATE_AND_SET_ROOT(Map, heap_number_map, Map::kSize);
416  ALLOCATE_AND_SET_ROOT(Map, bigint_map, Map::kSize);
417  // Keep FreeSpace and filler maps together for cheap
418  // `IsFreeSpaceOrFiller()`.
419  ALLOCATE_AND_SET_ROOT(Map, free_space_map, Map::kSize);
420  ALLOCATE_AND_SET_ROOT(Map, one_pointer_filler_map, Map::kSize);
421  ALLOCATE_AND_SET_ROOT(Map, two_pointer_filler_map, Map::kSize);
422 
423 #undef ALLOCATE_AND_SET_ROOT
424 
425  // Then, initialise the initial maps.
426  InitializePartialMap(isolate(), meta_map, meta_map, MAP_TYPE, Map::kSize);
427  InitializePartialMap(isolate(), undefined_map, meta_map, ODDBALL_TYPE,
428  sizeof(Undefined));
429  InitializePartialMap(isolate(), null_map, meta_map, ODDBALL_TYPE,
430  sizeof(Null));
431  InitializePartialMap(isolate(), boolean_map, meta_map, ODDBALL_TYPE,
432  sizeof(Boolean));
433  boolean_map->SetConstructorFunctionIndex(Context::BOOLEAN_FUNCTION_INDEX);
434  InitializePartialMap(isolate(), heap_number_map, meta_map, HEAP_NUMBER_TYPE,
435  sizeof(HeapNumber));
436  heap_number_map->SetConstructorFunctionIndex(
437  Context::NUMBER_FUNCTION_INDEX);
438  InitializePartialMap(isolate(), bigint_map, meta_map, BIGINT_TYPE,
440  InitializePartialMap(isolate(), free_space_map, meta_map, FREE_SPACE_TYPE,
442  InitializePartialMap(isolate(), one_pointer_filler_map, meta_map,
443  FILLER_TYPE, kTaggedSize);
444  InitializePartialMap(isolate(), two_pointer_filler_map, meta_map,
445  FILLER_TYPE, 2 * kTaggedSize);
446 
447  for (const StringTypeInit& entry : kStringTypeTable) {
448  Tagged<Map> map = UncheckedCast<Map>(roots.object_at(entry.index));
449  InitializePartialMap(isolate(), map, meta_map, entry.type, entry.size);
450  map->SetConstructorFunctionIndex(Context::STRING_FUNCTION_INDEX);
451  // Strings change maps in-place (e.g., when internalizing them). Thus they
452  // are marked unstable to let the compilers not depend on them not
453  // changing.
454  map->mark_unstable();
455  }
456  InitializePartialMap(isolate(), symbol_map, meta_map, SYMBOL_TYPE,
457  sizeof(Symbol));
458  symbol_map->SetConstructorFunctionIndex(Context::SYMBOL_FUNCTION_INDEX);
459 
460  // Finally, initialise the non-map objects using those maps.
461  undefined_value->set_map_after_allocation(isolate(), undefined_map,
463  undefined_value->set_kind(Oddball::kUndefined);
464 
465  null_value->set_map_after_allocation(isolate(), null_map,
467  null_value->set_kind(Oddball::kNull);
468 
469  true_value->set_map_after_allocation(isolate(), boolean_map,
471  true_value->set_kind(Oddball::kTrue);
472 
473  false_value->set_map_after_allocation(isolate(), boolean_map,
475  false_value->set_kind(Oddball::kFalse);
476 
477  // The empty string is initialised with an empty hash despite being
478  // internalized -- this will be calculated once the hashseed is available.
479  // TODO(leszeks): Unify this initialisation with normal string
480  // initialisation.
481  empty_string->set_map_after_allocation(
482  isolate(), roots.unchecked_internalized_one_byte_string_map(),
484  empty_string->clear_padding_destructively(0);
485  empty_string->set_length(0);
486  empty_string->set_raw_hash_field(String::kEmptyHashField);
487  }
488 
489  // Now that the initial objects are allocated, we can start allocating other
490  // objects where the order matters less.
491 
492 #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \
493  { \
494  Tagged<Map> map; \
495  if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \
496  set_##field_name##_map(map); \
497  }
498 
499  { // Partial map allocation
500  ALLOCATE_PARTIAL_MAP(FIXED_ARRAY_TYPE, kVariableSizeSentinel, fixed_array);
501  ALLOCATE_PARTIAL_MAP(TRUSTED_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
502  trusted_fixed_array);
503  ALLOCATE_PARTIAL_MAP(PROTECTED_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
504  protected_fixed_array);
505  ALLOCATE_PARTIAL_MAP(WEAK_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
506  weak_fixed_array);
507  ALLOCATE_PARTIAL_MAP(TRUSTED_WEAK_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
508  trusted_weak_fixed_array);
509  ALLOCATE_PARTIAL_MAP(PROTECTED_WEAK_FIXED_ARRAY_TYPE, kVariableSizeSentinel,
510  protected_weak_fixed_array);
511  ALLOCATE_PARTIAL_MAP(WEAK_ARRAY_LIST_TYPE, kVariableSizeSentinel,
512  weak_array_list);
514  fixed_cow_array)
515  DCHECK_NE(roots.fixed_array_map(), roots.fixed_cow_array_map());
516 
517  ALLOCATE_PARTIAL_MAP(DESCRIPTOR_ARRAY_TYPE, kVariableSizeSentinel,
518  descriptor_array)
519 
520  ALLOCATE_PARTIAL_MAP(HOLE_TYPE, Hole::kSize, hole);
521 
522  // Some struct maps which we need for later dependencies
523  for (const StructInit& entry : kStructTable) {
524  if (!is_important_struct(entry.type)) continue;
525  Tagged<Map> map;
526  if (!AllocatePartialMap(entry.type, entry.size).To(&map)) return false;
527  roots_table()[entry.index] = map.ptr();
528  }
529  }
530 #undef ALLOCATE_PARTIAL_MAP
531 
532  {
533  AllocationResult alloc =
535  if (!alloc.To(&obj)) return false;
536  obj->set_map_after_allocation(isolate(), roots.fixed_array_map(),
538  Cast<FixedArray>(obj)->set_length(0);
539  }
540  set_empty_fixed_array(Cast<FixedArray>(obj));
541 
542  {
543  AllocationResult alloc =
545  if (!alloc.To(&obj)) return false;
546  obj->set_map_after_allocation(isolate(), roots.weak_fixed_array_map(),
548  Cast<WeakFixedArray>(obj)->set_length(0);
549  }
550  set_empty_weak_fixed_array(Cast<WeakFixedArray>(obj));
551 
552  {
553  AllocationResult allocation = AllocateRaw(WeakArrayList::SizeForCapacity(0),
555  if (!allocation.To(&obj)) return false;
556  obj->set_map_after_allocation(isolate(), roots.weak_array_list_map(),
558  Cast<WeakArrayList>(obj)->set_capacity(0);
559  Cast<WeakArrayList>(obj)->set_length(0);
560  }
561  set_empty_weak_array_list(Cast<WeakArrayList>(obj));
562 
563  DCHECK(!HeapLayout::InYoungGeneration(roots.undefined_value()));
564  {
565  AllocationResult allocation =
567  if (!allocation.To(&obj)) return false;
568  }
569  set_the_hole_value(Cast<Hole>(obj));
570 
571  // Set preliminary exception sentinel value before actually initializing it.
572  set_exception(Cast<Hole>(obj));
573 
574  // Allocate the empty enum cache.
575  {
576  AllocationResult allocation =
577  Allocate(roots_table().enum_cache_map(), AllocationType::kReadOnly);
578  if (!allocation.To(&obj)) return false;
579  }
580  set_empty_enum_cache(Cast<EnumCache>(obj));
581  Cast<EnumCache>(obj)->set_keys(roots.empty_fixed_array());
582  Cast<EnumCache>(obj)->set_indices(roots.empty_fixed_array());
583 
584  // Allocate the empty descriptor array.
585  {
587  if (!AllocateRaw(size, AllocationType::kReadOnly).To(&obj)) return false;
588  obj->set_map_after_allocation(isolate(), roots.descriptor_array_map(),
590  Tagged<DescriptorArray> array = Cast<DescriptorArray>(obj);
591  array->Initialize(roots.empty_enum_cache(), roots.undefined_value(), 0, 0,
593  array->set_fast_iterable(DescriptorArray::FastIterableState::kJsonFast);
594  }
595  set_empty_descriptor_array(Cast<DescriptorArray>(obj));
596 
597  // Fix the instance_descriptors for the existing maps.
598  FinalizePartialMap(roots.meta_map());
599  FinalizePartialMap(roots.fixed_array_map());
600  FinalizePartialMap(roots.trusted_fixed_array_map());
601  FinalizePartialMap(roots.protected_fixed_array_map());
602  FinalizePartialMap(roots.weak_fixed_array_map());
603  FinalizePartialMap(roots.weak_array_list_map());
604  FinalizePartialMap(roots.trusted_weak_fixed_array_map());
605  FinalizePartialMap(roots.protected_weak_fixed_array_map());
606  FinalizePartialMap(roots.fixed_cow_array_map());
607  FinalizePartialMap(roots.descriptor_array_map());
608  FinalizePartialMap(roots.undefined_map());
609  roots.undefined_map()->set_is_undetectable(true);
610  FinalizePartialMap(roots.null_map());
611  roots.null_map()->set_is_undetectable(true);
612  FinalizePartialMap(roots.boolean_map());
613  FinalizePartialMap(roots.heap_number_map());
614  FinalizePartialMap(roots.bigint_map());
615  FinalizePartialMap(roots.hole_map());
616  FinalizePartialMap(roots.symbol_map());
617  FinalizePartialMap(roots.free_space_map());
618  FinalizePartialMap(roots.one_pointer_filler_map());
619  FinalizePartialMap(roots.two_pointer_filler_map());
620  for (const StructInit& entry : kStructTable) {
621  if (!is_important_struct(entry.type)) continue;
622  FinalizePartialMap(Cast<Map>(roots.object_at(entry.index)));
623  }
624  for (const StringTypeInit& entry : kStringTypeTable) {
625  FinalizePartialMap(Cast<Map>(roots.object_at(entry.index)));
626  }
627 
628 #define ALLOCATE_MAP(instance_type, size, field_name) \
629  { \
630  Tagged<Map> map; \
631  if (!AllocateMap(AllocationType::kReadOnly, (instance_type), size) \
632  .To(&map)) { \
633  return false; \
634  } \
635  set_##field_name##_map(map); \
636  }
637 
638 #define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \
639  ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)
640 
641 #define ALLOCATE_PRIMITIVE_MAP(instance_type, size, field_name, \
642  constructor_function_index) \
643  { \
644  ALLOCATE_MAP((instance_type), (size), field_name); \
645  roots.field_name##_map()->SetConstructorFunctionIndex( \
646  (constructor_function_index)); \
647  }
648 
649  { // Map allocation
650  ALLOCATE_VARSIZE_MAP(SCOPE_INFO_TYPE, scope_info)
651  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_info)
652  ALLOCATE_VARSIZE_MAP(CLOSURE_FEEDBACK_CELL_ARRAY_TYPE,
653  closure_feedback_cell_array)
654  ALLOCATE_VARSIZE_MAP(FEEDBACK_VECTOR_TYPE, feedback_vector)
655 
656  ALLOCATE_MAP(FOREIGN_TYPE, Foreign::kSize, foreign)
657  ALLOCATE_MAP(TRUSTED_FOREIGN_TYPE, TrustedForeign::kSize, trusted_foreign)
658  ALLOCATE_MAP(MEGA_DOM_HANDLER_TYPE, MegaDomHandler::kSize, mega_dom_handler)
659 
660  ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array)
661  roots.fixed_double_array_map()->set_elements_kind(HOLEY_DOUBLE_ELEMENTS);
662  ALLOCATE_VARSIZE_MAP(FEEDBACK_METADATA_TYPE, feedback_metadata)
663  ALLOCATE_VARSIZE_MAP(BYTE_ARRAY_TYPE, byte_array)
664  ALLOCATE_VARSIZE_MAP(TRUSTED_BYTE_ARRAY_TYPE, trusted_byte_array)
665  ALLOCATE_VARSIZE_MAP(BYTECODE_ARRAY_TYPE, bytecode_array)
666  ALLOCATE_VARSIZE_MAP(PROPERTY_ARRAY_TYPE, property_array)
667  ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_HASH_MAP_TYPE, small_ordered_hash_map)
668  ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_HASH_SET_TYPE, small_ordered_hash_set)
669  ALLOCATE_VARSIZE_MAP(SMALL_ORDERED_NAME_DICTIONARY_TYPE,
670  small_ordered_name_dictionary)
671 
672  ALLOCATE_VARSIZE_MAP(INSTRUCTION_STREAM_TYPE, instruction_stream)
673 
674  ALLOCATE_MAP(CELL_TYPE, Cell::kSize, cell);
675  {
676  // The invalid_prototype_validity_cell is needed for JSObject maps.
678  AllocationResult alloc =
680  if (!alloc.To(&obj)) return false;
681  obj->set_map_after_allocation(isolate(), roots.cell_map(),
683  Cast<Cell>(obj)->set_value(value);
684  set_invalid_prototype_validity_cell(Cast<Cell>(obj));
685  }
686 
687  ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
688 
689  // The "no closures" and "one closure" FeedbackCell maps need
690  // to be marked unstable because their objects can change maps.
691  ALLOCATE_MAP(FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize,
692  no_closures_cell)
693  roots.no_closures_cell_map()->mark_unstable();
694  ALLOCATE_MAP(FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize,
695  one_closure_cell)
696  roots.one_closure_cell_map()->mark_unstable();
697  ALLOCATE_MAP(FEEDBACK_CELL_TYPE, FeedbackCell::kAlignedSize,
698  many_closures_cell)
699 
700  ALLOCATE_VARSIZE_MAP(TRANSITION_ARRAY_TYPE, transition_array)
701 
702  ALLOCATE_VARSIZE_MAP(HASH_TABLE_TYPE, hash_table)
703  ALLOCATE_VARSIZE_MAP(ORDERED_NAME_DICTIONARY_TYPE, ordered_name_dictionary)
704  ALLOCATE_VARSIZE_MAP(NAME_DICTIONARY_TYPE, name_dictionary)
705  ALLOCATE_VARSIZE_MAP(SWISS_NAME_DICTIONARY_TYPE, swiss_name_dictionary)
706  ALLOCATE_VARSIZE_MAP(GLOBAL_DICTIONARY_TYPE, global_dictionary)
707  ALLOCATE_VARSIZE_MAP(NUMBER_DICTIONARY_TYPE, number_dictionary)
708 
709  ALLOCATE_VARSIZE_MAP(REGISTERED_SYMBOL_TABLE_TYPE, registered_symbol_table)
710 
711  ALLOCATE_VARSIZE_MAP(ARRAY_LIST_TYPE, array_list)
712 
713  ALLOCATE_MAP(ACCESSOR_INFO_TYPE, AccessorInfo::kSize, accessor_info)
714  ALLOCATE_MAP(INTERCEPTOR_INFO_TYPE, InterceptorInfo::kSize,
715  interceptor_info)
716 
717  ALLOCATE_VARSIZE_MAP(PREPARSE_DATA_TYPE, preparse_data)
718  ALLOCATE_MAP(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kSize,
719  shared_function_info)
720  ALLOCATE_MAP(CODE_TYPE, Code::kSize, code)
721 
722  return true;
723  }
724 }
bool To(Tagged< T > *obj) const
static constexpr RawGCStateType kInitialGCState
static constexpr int SizeFor(int number_of_all_descriptors)
static const int kAlignedSize
Definition: feedback-cell.h:32
static bool InYoungGeneration(Tagged< Object > object)
V8_WARN_UNUSED_RESULT AllocationResult AllocatePartialMap(InstanceType instance_type, int instance_size)
RootsTable & roots_table()
Definition: heap-inl.h:69
V8_WARN_UNUSED_RESULT AllocationResult Allocate(DirectHandle< Map > map, AllocationType allocation)
void FinalizePartialMap(Tagged< Map > map)
static constexpr int kSize
Definition: hole.h:33
static constexpr int kPrototypeChainInvalid
Definition: map.h:537
static constexpr int kEmptyHashField
Definition: name.h:133
static constexpr uint8_t kNull
Definition: oddball.h:56
static constexpr uint8_t kUndefined
Definition: oddball.h:57
static constexpr uint8_t kFalse
Definition: oddball.h:53
static constexpr uint8_t kTrue
Definition: oddball.h:54
static constexpr int32_t SizeFor(int32_t length)
Definition: string-inl.h:1359
static constexpr int SizeForCapacity(int capacity)
Definition: fixed-array.h:626
Force emit tier-up logic from all non-Turbofan code.
constexpr std::initializer_list< StringTypeInit > kStringTypeTable
constexpr std::initializer_list< StructInit > kStructTable
Tagged< DescriptorArray >
Definition: map-inl.h:52
const int kVariableSizeSentinel
Definition: objects.h:84
DONT_OVERRIDE DISABLE_ALLOCATION_SITES DISABLE_ALLOCATION_SITES HOLEY_DOUBLE_ELEMENTS
PerThreadAssertScopeDebugOnly< false, GC_MOLE > DisableGCMole
Definition: assert-scope.h:249
Local< Primitive > Null(Isolate *isolate)
Local< Primitive > Undefined(Isolate *isolate)
Local< Boolean > True(Isolate *isolate)
Local< Boolean > False(Isolate *isolate)
#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name)
#define ALLOCATE_MAP(instance_type, size, field_name)
#define ALLOCATE_AND_SET_ROOT(Type, name, Size)
#define ALLOCATE_VARSIZE_MAP(instance_type, field_name)
#define DCHECK_NE(v1, v2)
Definition: logging.h:485

References Allocate(), ALLOCATE_AND_SET_ROOT, ALLOCATE_MAP, ALLOCATE_PARTIAL_MAP, ALLOCATE_VARSIZE_MAP, AllocatePartialMap(), AllocateRaw(), v8::internal::BIGINT_TYPE, code, v8::internal::DCHECK(), DCHECK_NE, FinalizePartialMap(), v8::internal::Smi::FromInt(), v8::internal::HOLEY_DOUBLE_ELEMENTS, v8::internal::anonymous_namespace{setup-heap-internal.cc}::InitializePartialMap(), v8::internal::HeapLayout::InYoungGeneration(), v8::internal::anonymous_namespace{setup-heap-internal.cc}::is_important_struct(), isolate(), v8::internal::FeedbackCell::kAlignedSize, v8::internal::Name::kEmptyHashField, v8::internal::Oddball::kFalse, v8::internal::DescriptorArrayMarkingState::kInitialGCState, v8::internal::DescriptorArray::kJsonFast, v8::internal::Oddball::kNull, v8::internal::Map::kPrototypeChainInvalid, v8::internal::kReadOnly, v8::internal::Hole::kSize, v8::internal::anonymous_namespace{setup-heap-internal.cc}::kStringTypeTable, v8::internal::anonymous_namespace{setup-heap-internal.cc}::kStructTable, v8::internal::kTaggedSize, v8::internal::Oddball::kTrue, v8::internal::Oddball::kUndefined, v8::internal::kVariableSizeSentinel, v8::internal::ReadOnlyRoots::object_at(), v8::internal::TaggedImpl< kRefType, StorageType >::ptr(), roots_table(), size(), v8::internal::TaggedArrayBase< WeakFixedArray, WeakFixedArrayShape >::SizeFor(), v8::internal::TaggedArrayBase< FixedArray, TaggedArrayShape >::SizeFor(), v8::internal::DescriptorArray::SizeFor(), v8::internal::SeqOneByteString::SizeFor(), v8::internal::WeakArrayList::SizeForCapacity(), v8::internal::SKIP_WRITE_BARRIER, v8::internal::AllocationResult::To(), and v8::internal::value.

Referenced by CreateReadOnlyHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateFillerObjectAt()

void v8::internal::Heap::CreateFillerObjectAt ( Address  addr,
int  size,
ClearFreedMemoryMode  clear_memory_mode = ClearFreedMemoryMode::kDontClearFreedMemory 
)

Definition at line 3348 of file heap.cc.

3349  {
3350  if (size == 0) return;
3351  if (MemoryChunk::FromAddress(addr)->executable()) {
3352  WritableJitPage jit_page(addr, size);
3353  WritableFreeSpace free_space = jit_page.FreeRange(addr, size);
3354  CreateFillerObjectAtRaw(free_space, clear_memory_mode,
3357  } else {
3358  WritableFreeSpace free_space =
3360  CreateFillerObjectAtRaw(free_space, clear_memory_mode,
3363  }
3364 }
void CreateFillerObjectAtRaw(const WritableFreeSpace &free_space, ClearFreedMemoryMode clear_memory_mode, ClearRecordedSlots clear_slots_mode, VerifyNoSlotsRecorded verify_no_slots_recorded)
Definition: heap.cc:3366

References CreateFillerObjectAtRaw(), v8::internal::WritableFreeSpace::ForNonExecutableMemory(), v8::internal::WritableJitPage::FreeRange(), v8::internal::MemoryChunk::FromAddress(), v8::internal::kNo, kYes, and size().

Referenced by v8::internal::LargeObjectSpace::AdvanceAndInvokeAllocationObservers(), v8::internal::MainAllocator::AlignTopForTesting(), v8::internal::SemiSpace::AllocateFreshPage(), CreateReadOnlyObjects(), v8::internal::anonymous_namespace{read-only-promotion.cc}::ReadOnlyPromotionImpl::DeleteDeadObjects(), v8::internal::ReadOnlySpace::EnsureSpaceForAllocation(), v8::internal::ReadOnlySpace::FinalizeSpaceForDeserialization(), v8::internal::EvacuationAllocator::FreeLastInMainAllocator(), v8::internal::ReadOnlySpace::FreeLinearAllocationArea(), v8::internal::TranslatedValue::GetRawValue(), v8::internal::MainAllocator::InvokeAllocationObservers(), v8::internal::MainAllocator::MakeLinearAllocationAreaIterable(), PrecedeWithFiller(), v8::internal::ReadOnlySpace::RepairFreeSpacesAfterDeserialization(), v8::internal::ReadOnlySpace::ShrinkPages(), v8::internal::ReadOnlyPageMetadata::ShrinkToHighWaterMark(), StaticRootsEnsureAllocatedSize(), v8::internal::PagedSpaceAllocatorPolicy::TryAllocationFromFreeList(), and v8::internal::PagedSpaceAllocatorPolicy::TryExtendLAB().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateFillerObjectAtBackground()

void v8::internal::Heap::CreateFillerObjectAtBackground ( const WritableFreeSpace free_space)

Definition at line 3339 of file heap.cc.

3339  {
3340  // TODO(leszeks): Verify that no slots need to be recorded.
3341  // Do not verify whether slots are cleared here: the concurrent thread is not
3342  // allowed to access the main thread's remembered set.
3343  CreateFillerObjectAtRaw(free_space,
3346 }

References CreateFillerObjectAtRaw(), v8::internal::kDontClearFreedMemory, v8::internal::kNo, and kNo.

Referenced by AlignWithFillerBackground(), and PrecedeWithFillerBackground().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateFillerObjectAtRaw()

void v8::internal::Heap::CreateFillerObjectAtRaw ( const WritableFreeSpace free_space,
ClearFreedMemoryMode  clear_memory_mode,
ClearRecordedSlots  clear_slots_mode,
VerifyNoSlotsRecorded  verify_no_slots_recorded 
)
private

Definition at line 3366 of file heap.cc.

3369  {
3370  // TODO(mlippautz): It would be nice to DCHECK that we never call this
3371  // with {addr} pointing into large object space; however we currently do,
3372  // see, e.g., Factory::NewFillerObject and in many tests.
3373  size_t size = free_space.Size();
3374  if (size == 0) return;
3375  CreateFillerObjectAtImpl(free_space, this, clear_memory_mode);
3376  Address addr = free_space.Address();
3377  if (clear_slots_mode == ClearRecordedSlots::kYes) {
3378  ClearRecordedSlotRange(addr, addr + size);
3379  } else if (verify_no_slots_recorded == VerifyNoSlotsRecorded::kYes) {
3380  VerifyNoNeedToClearSlots(addr, addr + size);
3381  }
3382 }
void ClearRecordedSlotRange(Address start, Address end)
Definition: heap.cc:6649
void CreateFillerObjectAtImpl(const WritableFreeSpace &free_space, Heap *heap, ClearFreedMemoryMode clear_memory_mode)
Definition: heap.cc:3279
void VerifyNoNeedToClearSlots(Address start, Address end)
Definition: heap.cc:3334

References v8::internal::WritableFreeSpace::Address(), ClearRecordedSlotRange(), v8::internal::anonymous_namespace{heap.cc}::CreateFillerObjectAtImpl(), v8::internal::kYes, kYes, v8::internal::WritableFreeSpace::Size(), size(), and v8::internal::anonymous_namespace{heap.cc}::VerifyNoNeedToClearSlots().

Referenced by CreateFillerObjectAt(), CreateFillerObjectAtBackground(), LeftTrimFixedArray(), and NotifyObjectSizeChange().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateImportantReadOnlyObjects()

bool v8::internal::Heap::CreateImportantReadOnlyObjects ( )
private

Definition at line 918 of file setup-heap-internal.cc.

918  {
919  // Allocate some objects early to get addresses to fit as arm64 immediates.
920  Tagged<HeapObject> obj;
921  ReadOnlyRoots roots(isolate());
922  HandleScope initial_objects_handle_scope(isolate());
923 
924  // Hash seed for strings
925 
926  Factory* factory = isolate()->factory();
927  set_hash_seed(*factory->NewByteArray(kInt64Size, AllocationType::kReadOnly));
929 
930  // Important strings and symbols
931  for (const ConstantStringInit& entry : kImportantConstantStringTable) {
932  if (entry.index == RootIndex::kempty_string) {
933  // Special case the empty string, since it's allocated and initialised in
934  // the initial section.
936  } else {
937  DirectHandle<String> str = factory->InternalizeString(entry.contents);
938  roots_table()[entry.index] = str->ptr();
939  }
940  }
941 
942  {
943 #define SYMBOL_INIT(_, name) \
944  { \
945  DirectHandle<Symbol> symbol( \
946  isolate()->factory()->NewPrivateSymbol(AllocationType::kReadOnly)); \
947  roots_table()[RootIndex::k##name] = symbol->ptr(); \
948  }
950  // SYMBOL_INIT used again later.
951  }
952 
953  // Empty elements
954  DirectHandle<NameDictionary>
955  empty_property_dictionary = NameDictionary::New(
957  DCHECK(!empty_property_dictionary->HasSufficientCapacityToAdd(1));
958 
959  set_empty_property_dictionary(*empty_property_dictionary);
960 
961  // Allocate the empty OrderedNameDictionary
962  DirectHandle<OrderedNameDictionary> empty_ordered_property_dictionary =
964  .ToHandleChecked();
965  set_empty_ordered_property_dictionary(*empty_ordered_property_dictionary);
966 
967  {
969  .To(&obj)) {
970  return false;
971  }
972  obj->set_map_after_allocation(isolate(), roots.byte_array_map(),
974  Cast<ByteArray>(obj)->set_length(0);
975  set_empty_byte_array(Cast<ByteArray>(obj));
976  }
977 
978  {
979  AllocationResult alloc =
982  if (!alloc.To(&obj)) return false;
983  obj->set_map_after_allocation(isolate(), roots.scope_info_map(),
985  int flags = ScopeInfo::IsEmptyBit::encode(true);
986  DCHECK_EQ(ScopeInfo::LanguageModeBit::decode(flags), LanguageMode::kSloppy);
987  DCHECK_EQ(ScopeInfo::ReceiverVariableBits::decode(flags),
989  DCHECK_EQ(ScopeInfo::FunctionVariableBits::decode(flags),
991  Cast<ScopeInfo>(obj)->set_flags(flags, kRelaxedStore);
992  Cast<ScopeInfo>(obj)->set_context_local_count(0);
993  Cast<ScopeInfo>(obj)->set_parameter_count(0);
994  Cast<ScopeInfo>(obj)->set_position_info_start(0);
995  Cast<ScopeInfo>(obj)->set_position_info_end(0);
996  }
997  set_empty_scope_info(Cast<ScopeInfo>(obj));
998 
999  {
1001  .To(&obj)) {
1002  return false;
1003  }
1004  obj->set_map_after_allocation(isolate(), roots.property_array_map(),
1006  Cast<PropertyArray>(obj)->initialize_length(0);
1007  set_empty_property_array(Cast<PropertyArray>(obj));
1008  }
1009 
1010  // Heap Numbers
1011  // The -0 value must be set before NewNumber works.
1012  set_minus_zero_value(
1013  *factory->NewHeapNumber<AllocationType::kReadOnly>(-0.0));
1014  DCHECK(std::signbit(Object::NumberValue(roots.minus_zero_value())));
1015 
1016  set_nan_value(*factory->NewHeapNumber<AllocationType::kReadOnly>(
1017  std::numeric_limits<double>::quiet_NaN()));
1018 #ifdef V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
1019  set_undefined_nan_value(
1020  *factory->NewHeapNumberFromBits<AllocationType::kReadOnly>(
1021  kUndefinedNanInt64));
1022 #else
1023  set_undefined_nan_value(*factory->NewHeapNumber<AllocationType::kReadOnly>(
1024  std::numeric_limits<double>::quiet_NaN()));
1025 #endif // V8_ENABLE_EXPERIMENTAL_UNDEFINED_DOUBLE
1026  set_hole_nan_value(*factory->NewHeapNumberFromBits<AllocationType::kReadOnly>(
1027  kHoleNanInt64));
1028  set_infinity_value(
1029  *factory->NewHeapNumber<AllocationType::kReadOnly>(V8_INFINITY));
1030  set_minus_infinity_value(
1031  *factory->NewHeapNumber<AllocationType::kReadOnly>(-V8_INFINITY));
1032  set_max_safe_integer(
1033  *factory->NewHeapNumber<AllocationType::kReadOnly>(kMaxSafeInteger));
1034  set_max_uint_32(
1035  *factory->NewHeapNumber<AllocationType::kReadOnly>(kMaxUInt32));
1036  set_smi_min_value(
1037  *factory->NewHeapNumber<AllocationType::kReadOnly>(kSmiMinValue));
1038  set_smi_max_value_plus_one(
1039  *factory->NewHeapNumber<AllocationType::kReadOnly>(0.0 - kSmiMinValue));
1040 
1041  return true;
1042 }
friend class Factory
Definition: heap.h:2538
void InitializeHashSeed()
Definition: heap.cc:6087
StringTable * string_table() const
Definition: isolate.h:796
static V8_WARN_UNUSED_RESULT Handle< NameDictionary > New(IsolateT *isolate, int at_least_space_for, AllocationType allocation=AllocationType::kYoung, MinimumCapacity capacity_option=USE_DEFAULT_MINIMUM_CAPACITY)
static double NumberValue(Tagged< Number > obj)
Definition: objects-inl.h:675
static MaybeHandle< OrderedNameDictionary > AllocateEmpty(Isolate *isolate, AllocationType allocation=AllocationType::kReadOnly)
static constexpr int SizeFor(int length)
Definition: scope-info.h:301
void InsertEmptyStringForBootstrapping(Isolate *isolate)
#define V8_INFINITY
Definition: globals.h:25
#define IMPORTANT_PRIVATE_SYMBOL_LIST_GENERATOR(V, _)
Definition: heap-symbols.h:887
constexpr std::initializer_list< ConstantStringInit > kImportantConstantStringTable
constexpr double kMaxSafeInteger
Definition: globals.h:1996
constexpr int kInt64Size
Definition: globals.h:403
@ USE_CUSTOM_MINIMUM_CAPACITY
Definition: globals.h:1583
constexpr uint64_t kHoleNanInt64
Definition: globals.h:1975
const int kSmiMinValue
Definition: v8-internal.h:206
constexpr uint32_t kMaxUInt32
Definition: globals.h:388
static constexpr RelaxedStoreTag kRelaxedStore
Definition: globals.h:2945
#define SYMBOL_INIT(_, name)

References v8::internal::OrderedNameDictionary::AllocateEmpty(), AllocateRaw(), v8::internal::DCHECK(), DCHECK_EQ, v8::internal::Isolate::factory(), v8::internal::flags, IMPORTANT_PRIVATE_SYMBOL_LIST_GENERATOR, InitializeHashSeed(), v8::internal::StringTable::InsertEmptyStringForBootstrapping(), v8::internal::Factory::InternalizeString(), isolate(), v8::internal::kHoleNanInt64, v8::internal::anonymous_namespace{setup-heap-internal.cc}::kImportantConstantStringTable, v8::internal::kInt64Size, v8::internal::kMaxSafeInteger, v8::internal::kMaxUInt32, v8::internal::kReadOnly, v8::kRelaxedStore, v8::internal::kSloppy, v8::internal::kSmiMinValue, v8::internal::ScopeInfo::kVariablePartIndex, v8::internal::NameDictionary::New(), v8::internal::FactoryBase< Impl >::NewByteArray(), v8::internal::FactoryBase< Impl >::NewHeapNumber(), v8::internal::FactoryBase< Impl >::NewHeapNumberFromBits(), v8::internal::NONE, v8::internal::Object::NumberValue(), roots_table(), v8::internal::TaggedArrayBase< FixedArray, TaggedArrayShape >::SizeFor(), v8::internal::PrimitiveArrayBase< ByteArray, ByteArrayShape >::SizeFor(), v8::internal::ScopeInfo::SizeFor(), v8::internal::SKIP_WRITE_BARRIER, v8::internal::Isolate::string_table(), SYMBOL_INIT, v8::internal::AllocationResult::To(), v8::internal::USE_CUSTOM_MINIMUM_CAPACITY, and V8_INFINITY.

Referenced by CreateReadOnlyHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateInitialMutableObjects()

void v8::internal::Heap::CreateInitialMutableObjects ( )
private

Definition at line 1404 of file setup-heap-internal.cc.

1404  {
1405  HandleScope initial_objects_handle_scope(isolate());
1406  Factory* factory = isolate()->factory();
1407  ReadOnlyRoots roots(this);
1408 
1409  // There's no "current microtask" in the beginning.
1410  set_current_microtask(roots.undefined_value());
1411 
1412  set_weak_refs_keep_during_job(roots.undefined_value());
1413 
1414  set_public_symbol_table(roots.empty_symbol_table());
1415  set_api_symbol_table(roots.empty_symbol_table());
1416  set_api_private_symbol_table(roots.empty_symbol_table());
1417 
1418  set_smi_string_cache(
1420  set_double_string_cache(
1422 
1423  // Unchecked to skip failing checks since required roots are uninitialized.
1424  set_basic_block_profiling_data(roots.unchecked_empty_array_list());
1425 
1426  // Allocate regexp caches.
1427  set_string_split_cache(*factory->NewFixedArray(
1429  set_regexp_multiple_cache(*factory->NewFixedArray(
1431  set_regexp_match_global_atom_cache(*factory->NewFixedArray(
1433 
1434  set_detached_contexts(roots.empty_weak_array_list());
1435 
1436  set_feedback_vectors_for_profiling_tools(roots.undefined_value());
1437  set_functions_marked_for_manual_optimization(roots.undefined_value());
1438  set_shared_wasm_memories(roots.empty_weak_array_list());
1439  set_locals_block_list_cache(roots.undefined_value());
1440 #ifdef V8_ENABLE_WEBASSEMBLY
1441  set_active_suspender(roots.undefined_value());
1442  set_js_to_wasm_wrappers(roots.empty_weak_fixed_array());
1443  set_wasm_canonical_rtts(roots.empty_weak_fixed_array());
1444 #endif // V8_ENABLE_WEBASSEMBLY
1445 
1446  set_script_list(roots.empty_weak_array_list());
1447 
1448  set_materialized_objects(*factory->NewFixedArray(0, AllocationType::kOld));
1449 
1450  // Handling of script id generation is in Heap::NextScriptId().
1451  set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId));
1452  set_last_debugging_id(Smi::FromInt(DebugInfo::kNoDebuggingId));
1453  set_last_stack_trace_id(Smi::zero());
1454  set_next_template_serial_number(
1456 
1457  // Allocate the empty script.
1458  DirectHandle<Script> script = factory->NewScript(factory->empty_string());
1459  script->set_type(Script::Type::kNative);
1460  // This is used for exceptions thrown with no stack frames. Such exceptions
1461  // can be shared everywhere.
1462  script->set_origin_options(ScriptOriginOptions(true, false));
1463  set_empty_script(*script);
1464 
1465  // Protectors
1466  set_array_buffer_detaching_protector(*factory->NewProtector());
1467  set_array_iterator_protector(*factory->NewProtector());
1468  set_array_species_protector(*factory->NewProtector());
1469  set_no_date_time_configuration_change_protector(*factory->NewProtector());
1470  set_is_concat_spreadable_protector(*factory->NewProtector());
1471  set_map_iterator_protector(*factory->NewProtector());
1472  set_no_elements_protector(*factory->NewProtector());
1473  set_mega_dom_protector(*factory->NewProtector());
1474  set_no_profiling_protector(*factory->NewProtector());
1475  set_no_undetectable_objects_protector(*factory->NewProtector());
1476  set_promise_hook_protector(*factory->NewProtector());
1477  set_promise_resolve_protector(*factory->NewProtector());
1478  set_promise_species_protector(*factory->NewProtector());
1479  set_promise_then_protector(*factory->NewProtector());
1480  set_regexp_species_protector(*factory->NewProtector());
1481  set_set_iterator_protector(*factory->NewProtector());
1482  set_string_iterator_protector(*factory->NewProtector());
1483  set_string_length_protector(*factory->NewProtector());
1484  set_string_wrapper_to_primitive_protector(*factory->NewProtector());
1485  set_number_string_not_regexp_like_protector(*factory->NewProtector());
1486  set_typed_array_species_protector(*factory->NewProtector());
1487 
1488  set_serialized_objects(roots.empty_fixed_array());
1489  set_serialized_global_proxy_sizes(roots.empty_fixed_array());
1490 
1491  // Evaluate the hash values which will then be cached in the strings.
1492  isolate()->factory()->zero_string()->EnsureHash();
1493  isolate()->factory()->one_string()->EnsureHash();
1494 
1495  // Initialize builtins constants table.
1496  set_builtins_constants_table(roots.empty_fixed_array());
1497 
1498  // Initialize descriptor cache.
1500 
1501  // Initialize compilation cache.
1503 
1504  // Error.stack accessor callbacks and their SharedFunctionInfos:
1505  {
1506  DirectHandle<FunctionTemplateInfo> function_template;
1508  isolate_, Accessors::ErrorStackGetter, 0,
1511  isolate_, function_template);
1512  set_error_stack_getter_fun_template(*function_template);
1513 
1515  isolate_, Accessors::ErrorStackSetter, 1,
1518  isolate_, function_template);
1519  set_error_stack_setter_fun_template(*function_template);
1520  }
1521 
1522  // Create internal SharedFunctionInfos.
1523  // Async functions:
1524  {
1525  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1526  isolate(), Builtin::kAsyncFunctionAwaitRejectClosure, 1);
1527  set_async_function_await_reject_closure_shared_fun(*info);
1528 
1529  info = CreateSharedFunctionInfo(
1530  isolate(), Builtin::kAsyncFunctionAwaitResolveClosure, 1);
1531  set_async_function_await_resolve_closure_shared_fun(*info);
1532  }
1533 
1534  // Async generators:
1535  {
1536  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1537  isolate(), Builtin::kAsyncGeneratorAwaitResolveClosure, 1);
1538  set_async_generator_await_resolve_closure_shared_fun(*info);
1539 
1540  info = CreateSharedFunctionInfo(
1541  isolate(), Builtin::kAsyncGeneratorAwaitRejectClosure, 1);
1542  set_async_generator_await_reject_closure_shared_fun(*info);
1543 
1544  info = CreateSharedFunctionInfo(
1545  isolate(), Builtin::kAsyncGeneratorYieldWithAwaitResolveClosure, 1);
1546  set_async_generator_yield_with_await_resolve_closure_shared_fun(*info);
1547 
1548  info = CreateSharedFunctionInfo(
1549  isolate(), Builtin::kAsyncGeneratorReturnResolveClosure, 1);
1550  set_async_generator_return_resolve_closure_shared_fun(*info);
1551 
1552  info = CreateSharedFunctionInfo(
1553  isolate(), Builtin::kAsyncGeneratorReturnClosedResolveClosure, 1);
1554  set_async_generator_return_closed_resolve_closure_shared_fun(*info);
1555 
1556  info = CreateSharedFunctionInfo(
1557  isolate(), Builtin::kAsyncGeneratorReturnClosedRejectClosure, 1);
1558  set_async_generator_return_closed_reject_closure_shared_fun(*info);
1559  }
1560 
1561  // AsyncIterator:
1562  {
1563  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1564  isolate_, Builtin::kAsyncIteratorValueUnwrap, 1);
1565  set_async_iterator_value_unwrap_shared_fun(*info);
1566 
1567  info = CreateSharedFunctionInfo(
1568  isolate_, Builtin::kAsyncIteratorPrototypeAsyncDisposeResolveClosure,
1569  0);
1570  set_async_iterator_prototype_async_dispose_resolve_closure_shared_fun(
1571  *info);
1572  }
1573 
1574  // AsyncFromSyncIterator:
1575  {
1576  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1577  isolate_, Builtin::kAsyncFromSyncIteratorCloseSyncAndRethrow, 1);
1578  set_async_from_sync_iterator_close_sync_and_rethrow_shared_fun(*info);
1579  }
1580 
1581  // Promises:
1582  {
1583  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1584  isolate_, Builtin::kPromiseCapabilityDefaultResolve, 1,
1586  info->set_native(true);
1587  info->set_function_map_index(
1588  Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX);
1589  set_promise_capability_default_resolve_shared_fun(*info);
1590 
1592  Builtin::kPromiseCapabilityDefaultReject, 1,
1594  info->set_native(true);
1595  info->set_function_map_index(
1596  Context::STRICT_FUNCTION_WITHOUT_PROTOTYPE_MAP_INDEX);
1597  set_promise_capability_default_reject_shared_fun(*info);
1598 
1599  info = CreateSharedFunctionInfo(
1600  isolate_, Builtin::kPromiseGetCapabilitiesExecutor, 2);
1601  set_promise_get_capabilities_executor_shared_fun(*info);
1602  }
1603 
1604  // Promises / finally:
1605  {
1606  DirectHandle<SharedFunctionInfo> info =
1607  CreateSharedFunctionInfo(isolate(), Builtin::kPromiseThenFinally, 1);
1608  info->set_native(true);
1609  set_promise_then_finally_shared_fun(*info);
1610 
1611  info =
1612  CreateSharedFunctionInfo(isolate(), Builtin::kPromiseCatchFinally, 1);
1613  info->set_native(true);
1614  set_promise_catch_finally_shared_fun(*info);
1615 
1617  Builtin::kPromiseValueThunkFinally, 0);
1618  set_promise_value_thunk_finally_shared_fun(*info);
1619 
1620  info =
1621  CreateSharedFunctionInfo(isolate(), Builtin::kPromiseThrowerFinally, 0);
1622  set_promise_thrower_finally_shared_fun(*info);
1623  }
1624 
1625  // Promise combinators:
1626  {
1627  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1628  isolate_, Builtin::kPromiseAllResolveElementClosure, 1);
1629  set_promise_all_resolve_element_closure_shared_fun(*info);
1630 
1631  info = CreateSharedFunctionInfo(
1632  isolate_, Builtin::kPromiseAllSettledResolveElementClosure, 1);
1633  set_promise_all_settled_resolve_element_closure_shared_fun(*info);
1634 
1635  info = CreateSharedFunctionInfo(
1636  isolate_, Builtin::kPromiseAllSettledRejectElementClosure, 1);
1637  set_promise_all_settled_reject_element_closure_shared_fun(*info);
1638 
1639  info = CreateSharedFunctionInfo(
1640  isolate_, Builtin::kPromiseAnyRejectElementClosure, 1);
1641  set_promise_any_reject_element_closure_shared_fun(*info);
1642  }
1643 
1644  // ProxyRevoke:
1645  {
1646  DirectHandle<SharedFunctionInfo> info =
1647  CreateSharedFunctionInfo(isolate_, Builtin::kProxyRevoke, 0);
1648  set_proxy_revoke_shared_fun(*info);
1649  }
1650 
1651  // ShadowRealm:
1652  {
1653  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1654  isolate_, Builtin::kShadowRealmImportValueFulfilled, 1);
1655  set_shadow_realm_import_value_fulfilled_shared_fun(*info);
1656  }
1657 
1658  // SourceTextModule:
1659  {
1660  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1661  isolate_, Builtin::kCallAsyncModuleFulfilled, 0);
1662  set_source_text_module_execute_async_module_fulfilled_sfi(*info);
1663 
1664  info = CreateSharedFunctionInfo(isolate_, Builtin::kCallAsyncModuleRejected,
1665  0);
1666  set_source_text_module_execute_async_module_rejected_sfi(*info);
1667  }
1668 
1669  // Array.fromAsync:
1670  {
1671  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1672  isolate_, Builtin::kArrayFromAsyncIterableOnFulfilled, 1);
1673  set_array_from_async_iterable_on_fulfilled_shared_fun(*info);
1674 
1675  info = CreateSharedFunctionInfo(
1676  isolate_, Builtin::kArrayFromAsyncIterableOnRejected, 1);
1677  set_array_from_async_iterable_on_rejected_shared_fun(*info);
1678 
1679  info = CreateSharedFunctionInfo(
1680  isolate_, Builtin::kArrayFromAsyncArrayLikeOnFulfilled, 1);
1681  set_array_from_async_array_like_on_fulfilled_shared_fun(*info);
1682 
1683  info = CreateSharedFunctionInfo(
1684  isolate_, Builtin::kArrayFromAsyncArrayLikeOnRejected, 1);
1685  set_array_from_async_array_like_on_rejected_shared_fun(*info);
1686  }
1687 
1688  // Atomics.Mutex
1689  {
1690  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1691  isolate_, Builtin::kAtomicsMutexAsyncUnlockResolveHandler, 1);
1692  set_atomics_mutex_async_unlock_resolve_handler_sfi(*info);
1693  info = CreateSharedFunctionInfo(
1694  isolate_, Builtin::kAtomicsMutexAsyncUnlockRejectHandler, 1);
1695  set_atomics_mutex_async_unlock_reject_handler_sfi(*info);
1696  }
1697 
1698  // Atomics.Condition
1699  {
1700  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1701  isolate_, Builtin::kAtomicsConditionAcquireLock, 0);
1702  set_atomics_condition_acquire_lock_sfi(*info);
1703  }
1704 
1705  // Async Disposable Stack
1706  {
1707  DirectHandle<SharedFunctionInfo> info = CreateSharedFunctionInfo(
1708  isolate_, Builtin::kAsyncDisposableStackOnFulfilled, 0);
1709  set_async_disposable_stack_on_fulfilled_shared_fun(*info);
1710 
1711  info = CreateSharedFunctionInfo(
1712  isolate_, Builtin::kAsyncDisposableStackOnRejected, 0);
1713  set_async_disposable_stack_on_rejected_shared_fun(*info);
1714 
1716  Builtin::kAsyncDisposeFromSyncDispose, 0);
1717  set_async_dispose_from_sync_dispose_shared_fun(*info);
1718  }
1719 
1720  // Trusted roots:
1721  // TODO(saelo): these would ideally be read-only and shared, but we currently
1722  // don't have a trusted RO space.
1723  {
1724  set_empty_trusted_byte_array(*TrustedByteArray::New(isolate_, 0));
1725  set_empty_trusted_fixed_array(*TrustedFixedArray::New(isolate_, 0));
1726  set_empty_trusted_weak_fixed_array(
1728  set_empty_protected_fixed_array(*ProtectedFixedArray::New(isolate_, 0));
1729  set_empty_protected_weak_fixed_array(
1731  }
1732 }
static const int kNoScriptId
Definition: v8-script.h:91
static DirectHandle< FunctionTemplateInfo > CreateAccessorFunctionTemplateInfo(Isolate *isolate, FunctionCallback callback, int length, v8::SideEffectType side_effect_type)
Definition: api-natives.cc:484
static const int kNoDebuggingId
static DirectHandle< DoubleStringCache > New(IsolateT *isolate, int capacity)
static void SealAndPrepareForPromotionToReadOnly(Isolate *isolate, DirectHandle< FunctionTemplateInfo > info)
Definition: templates.cc:127
DescriptorLookupCache * descriptor_lookup_cache() const
Definition: isolate.h:1404
CompilationCache * compilation_cache()
Definition: isolate.h:1206
static Handle< ProtectedFixedArray > New(IsolateT *isolate, int capacity, bool shared=false)
static Handle< ProtectedWeakFixedArray > New(IsolateT *isolate, int capacity)
static constexpr int kRegExpResultsCacheSize
Definition: regexp.h:240
static DirectHandle< SmiStringCache > New(IsolateT *isolate, int capacity)
static constexpr int kInitialSize
static const int kUninitializedSerialNumber
Definition: templates.h:41
static Handle< TrustedByteArray > New(IsolateT *isolate, int capacity, AllocationType allocation_type=AllocationType::kTrusted)
static Handle< TrustedFixedArray > New(IsolateT *isolate, int capacity, AllocationType allocation=AllocationType::kTrusted)
static Handle< TrustedWeakFixedArray > New(IsolateT *isolate, int capacity)
DirectHandle< SharedFunctionInfo > CreateSharedFunctionInfo(Isolate *isolate, Builtin builtin, int len, FunctionKind kind=FunctionKind::kNormalFunction)
kInterpreterTrampolineOffset script

References v8::internal::CompilationCache::Clear(), v8::internal::DescriptorLookupCache::Clear(), v8::internal::Isolate::compilation_cache(), v8::internal::ApiNatives::CreateAccessorFunctionTemplateInfo(), v8::internal::anonymous_namespace{setup-heap-internal.cc}::CreateSharedFunctionInfo(), v8::internal::Isolate::descriptor_lookup_cache(), v8::internal::Isolate::factory(), v8::internal::Smi::FromInt(), isolate(), isolate_, v8::internal::kConciseMethod, v8::kHasSideEffect, v8::kHasSideEffectToReceiver, v8::internal::SmiStringCache::kInitialSize, v8::internal::DoubleStringCache::kInitialSize, v8::internal::Script::kNative, v8::internal::DebugInfo::kNoDebuggingId, v8::UnboundScript::kNoScriptId, v8::internal::kOld, v8::internal::RegExpResultsCache::kRegExpResultsCacheSize, v8::internal::RegExpResultsCache_MatchGlobalAtom::kSize, v8::internal::TemplateInfo::kUninitializedSerialNumber, v8::internal::TrustedWeakFixedArray::New(), v8::internal::ProtectedWeakFixedArray::New(), v8::internal::SmiStringCache::New(), v8::internal::DoubleStringCache::New(), v8::internal::TrustedFixedArray::New(), v8::internal::TrustedByteArray::New(), v8::internal::ProtectedFixedArray::New(), v8::internal::FactoryBase< Impl >::NewFixedArray(), v8::internal::Factory::NewProtector(), v8::internal::FactoryBase< Impl >::NewScript(), v8::internal::script, v8::internal::FunctionTemplateInfo::SealAndPrepareForPromotionToReadOnly(), and v8::internal::Smi::zero().

Referenced by CreateMutableHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateInternalAccessorInfoObjects()

void v8::internal::Heap::CreateInternalAccessorInfoObjects ( )
private

Definition at line 1734 of file setup-heap-internal.cc.

1734  {
1735  Isolate* isolate = this->isolate();
1736  HandleScope scope(isolate);
1737  DirectHandle<AccessorInfo> accessor_info;
1738 
1739 #define INIT_ACCESSOR_INFO(_, accessor_name, AccessorName, ...) \
1740  accessor_info = Accessors::Make##AccessorName##Info(isolate); \
1741  roots_table()[RootIndex::k##AccessorName##Accessor] = accessor_info->ptr();
1743 #undef INIT_ACCESSOR_INFO
1744 
1745 #define INIT_SIDE_EFFECT_FLAG(_, accessor_name, AccessorName, GetterType, \
1746  SetterType) \
1747  Cast<AccessorInfo>( \
1748  Tagged<Object>(roots_table()[RootIndex::k##AccessorName##Accessor])) \
1749  ->set_getter_side_effect_type(SideEffectType::GetterType); \
1750  Cast<AccessorInfo>( \
1751  Tagged<Object>(roots_table()[RootIndex::k##AccessorName##Accessor])) \
1752  ->set_setter_side_effect_type(SideEffectType::SetterType);
1754 #undef INIT_SIDE_EFFECT_FLAG
1755 }
#define ACCESSOR_INFO_LIST_GENERATOR(V, _)
Definition: accessors.h:25
#define INIT_SIDE_EFFECT_FLAG(_, accessor_name, AccessorName, GetterType, SetterType)
#define INIT_ACCESSOR_INFO(_, accessor_name, AccessorName,...)

References ACCESSOR_INFO_LIST_GENERATOR, INIT_ACCESSOR_INFO, INIT_SIDE_EFFECT_FLAG, and isolate().

Referenced by CreateMutableHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateLateReadOnlyJSReceiverMaps()

bool v8::internal::Heap::CreateLateReadOnlyJSReceiverMaps ( )
private

Definition at line 823 of file setup-heap-internal.cc.

823  {
824 #define ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP(instance_type, size, \
825  field_name) \
826  { \
827  Tagged<Map> map; \
828  if (!AllocateMap(AllocationType::kReadOnly, (instance_type), size, \
829  DICTIONARY_ELEMENTS) \
830  .To(&map)) { \
831  return false; \
832  } \
833  AlwaysSharedSpaceJSObject::PrepareMapNoEnumerableProperties(map); \
834  set_##field_name##_map(map); \
835  }
836 
837  HandleScope late_jsreceiver_maps_handle_scope(isolate());
838  Factory* factory = isolate()->factory();
839  ReadOnlyRoots roots(this);
840 
841  {
842  // JSMessageObject and JSExternalObject types are wrappers around a set
843  // of primitive values and exist only for the purpose of passing the data
844  // across V8 Api. They are not supposed to be leaked to user JS code
845  // except from d8 tests and they are not proper JSReceivers.
846  ALLOCATE_MAP(JS_MESSAGE_OBJECT_TYPE, JSMessageObject::kHeaderSize,
847  message_object)
848  roots.message_object_map()->SetEnumLength(0);
849  roots.message_object_map()->set_is_extensible(false);
850 
851  ALLOCATE_MAP(JS_EXTERNAL_OBJECT_TYPE, JSExternalObject::kHeaderSize,
852  external)
853  roots.external_map()->SetEnumLength(0);
854  roots.external_map()->set_is_extensible(false);
855 
856  ALLOCATE_MAP(CPP_HEAP_EXTERNAL_OBJECT_TYPE,
857  CppHeapExternalObject::kHeaderSize, cpp_heap_external)
858  }
859 
860  // Shared space object maps are immutable and can be in RO space.
861  {
862  Tagged<Map> shared_array_map;
863  if (!AllocateMap(AllocationType::kReadOnly, JS_SHARED_ARRAY_TYPE,
866  .To(&shared_array_map)) {
867  return false;
868  }
870  shared_array_map);
871  DirectHandle<DescriptorArray> descriptors =
872  factory->NewDescriptorArray(1, 0, AllocationType::kReadOnly);
873  Descriptor length_descriptor = Descriptor::DataField(
874  factory->length_string(), JSSharedArray::kLengthFieldIndex,
876  MaybeObjectDirectHandle(FieldType::Any(isolate())));
877  descriptors->Set(InternalIndex(0), &length_descriptor);
878  shared_array_map->InitializeDescriptors(isolate(), *descriptors);
879  set_js_shared_array_map(shared_array_map);
880  }
881 
883  JS_ATOMICS_MUTEX_TYPE, JSAtomicsMutex::kHeaderSize, js_atomics_mutex)
884  ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP(JS_ATOMICS_CONDITION_TYPE,
885  JSAtomicsCondition::kHeaderSize,
886  js_atomics_condition)
887 
888 #undef ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP
889 #undef ALLOCATE_PRIMITIVE_MAP
890 #undef ALLOCATE_VARSIZE_MAP
891 #undef ALLOCATE_MAP
892 
893  return true;
894 }
static void PrepareMapNoEnumerableProperties(Tagged< Map > map)
Definition: js-struct.cc:34
static Descriptor DataField(Isolate *isolate, DirectHandle< Name > key, int field_index, PropertyAttributes attributes, Representation representation)
Definition: property.cc:81
static V8_EXPORT_PRIVATE Tagged< FieldType > Any()
Definition: field-type.cc:22
V8_WARN_UNUSED_RESULT AllocationResult AllocateMap(AllocationType allocation_type, InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND, int inobject_properties=0)
static constexpr int kSize
static constexpr Representation Smi()
#define ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP(instance_type, size, field_name)

References v8::internal::ALL_ATTRIBUTES_MASK, ALLOCATE_ALWAYS_SHARED_SPACE_JSOBJECT_MAP, ALLOCATE_MAP, AllocateMap(), v8::internal::FieldType::Any(), v8::internal::Descriptor::DataField(), v8::internal::Isolate::factory(), isolate(), v8::internal::kConst, v8::internal::JSSharedArray::kInObjectFieldCount, v8::internal::JSSharedArray::kLengthFieldIndex, v8::internal::kReadOnly, v8::internal::JSSharedArray::kSize, v8::internal::FactoryBase< Impl >::NewDescriptorArray(), v8::internal::AlwaysSharedSpaceJSObject::PrepareMapNoEnumerableProperties(), v8::internal::SHARED_ARRAY_ELEMENTS, and v8::internal::Representation::Smi().

Referenced by CreateReadOnlyHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateLateReadOnlyNonJSReceiverMaps()

bool v8::internal::Heap::CreateLateReadOnlyNonJSReceiverMaps ( )
private

Definition at line 726 of file setup-heap-internal.cc.

726  {
727  ReadOnlyRoots roots(this);
728  {
729  // Setup the struct maps.
730  for (const StructInit& entry : kStructTable) {
731  if (is_important_struct(entry.type)) continue;
732  Tagged<Map> map;
733  if (!AllocateMap(AllocationType::kReadOnly, entry.type, entry.size)
734  .To(&map))
735  return false;
736  roots_table()[entry.index] = map.ptr();
737  }
738 
739 #define TORQUE_ALLOCATE_MAP(NAME, Name, name) \
740  ALLOCATE_MAP(NAME, Name::SizeFor(), name)
741  TORQUE_DEFINED_FIXED_INSTANCE_TYPE_LIST(TORQUE_ALLOCATE_MAP);
742 #undef TORQUE_ALLOCATE_MAP
743 
744 #define TORQUE_ALLOCATE_VARSIZE_MAP(NAME, Name, name) \
745  /* The DescriptorArray map is pre-allocated and initialized above. */ \
746  if (NAME != DESCRIPTOR_ARRAY_TYPE) { \
747  ALLOCATE_VARSIZE_MAP(NAME, name) \
748  }
749  TORQUE_DEFINED_VARSIZE_INSTANCE_TYPE_LIST(TORQUE_ALLOCATE_VARSIZE_MAP);
750 #undef TORQUE_ALLOCATE_VARSIZE_MAP
751 
752  ALLOCATE_VARSIZE_MAP(ORDERED_HASH_MAP_TYPE, ordered_hash_map)
753  ALLOCATE_VARSIZE_MAP(ORDERED_HASH_SET_TYPE, ordered_hash_set)
754 
755  ALLOCATE_VARSIZE_MAP(SIMPLE_NUMBER_DICTIONARY_TYPE,
756  simple_number_dictionary)
757  ALLOCATE_VARSIZE_MAP(SIMPLE_NAME_DICTIONARY_TYPE, simple_name_dictionary)
758  ALLOCATE_VARSIZE_MAP(NAME_TO_INDEX_HASH_TABLE_TYPE,
759  name_to_index_hash_table)
760  ALLOCATE_VARSIZE_MAP(DOUBLE_STRING_CACHE_TYPE, double_string_cache)
761 
762  ALLOCATE_VARSIZE_MAP(EMBEDDER_DATA_ARRAY_TYPE, embedder_data_array)
763  ALLOCATE_VARSIZE_MAP(EPHEMERON_HASH_TABLE_TYPE, ephemeron_hash_table)
764 
765  ALLOCATE_VARSIZE_MAP(SCRIPT_CONTEXT_TABLE_TYPE, script_context_table)
766 
767  ALLOCATE_VARSIZE_MAP(OBJECT_BOILERPLATE_DESCRIPTION_TYPE,
768  object_boilerplate_description)
769 
770  ALLOCATE_VARSIZE_MAP(COVERAGE_INFO_TYPE, coverage_info);
771  ALLOCATE_VARSIZE_MAP(REG_EXP_MATCH_INFO_TYPE, regexp_match_info);
772 
773  ALLOCATE_MAP(REG_EXP_DATA_TYPE, RegExpData::kSize, regexp_data);
774  ALLOCATE_MAP(ATOM_REG_EXP_DATA_TYPE, AtomRegExpData::kSize,
775  atom_regexp_data);
776  ALLOCATE_MAP(IR_REG_EXP_DATA_TYPE, IrRegExpData::kSize, ir_regexp_data);
777 
778  ALLOCATE_MAP(SOURCE_TEXT_MODULE_TYPE, SourceTextModule::kSize,
779  source_text_module)
780  ALLOCATE_MAP(SYNTHETIC_MODULE_TYPE, SyntheticModule::kSize,
781  synthetic_module)
782 
783  ALLOCATE_MAP(CONTEXT_CELL_TYPE, sizeof(ContextCell), context_cell)
784 
785  IF_WASM(ALLOCATE_MAP, WASM_IMPORT_DATA_TYPE, WasmImportData::kSize,
786  wasm_import_data)
787  IF_WASM(ALLOCATE_MAP, WASM_CAPI_FUNCTION_DATA_TYPE,
788  WasmCapiFunctionData::kSize, wasm_capi_function_data)
789  IF_WASM(ALLOCATE_MAP, WASM_EXPORTED_FUNCTION_DATA_TYPE,
790  WasmExportedFunctionData::kSize, wasm_exported_function_data)
791  IF_WASM(ALLOCATE_MAP, WASM_INTERNAL_FUNCTION_TYPE,
792  WasmInternalFunction::kSize, wasm_internal_function)
793  IF_WASM(ALLOCATE_MAP, WASM_FUNC_REF_TYPE, WasmFuncRef::kSize, wasm_func_ref)
794  IF_WASM(ALLOCATE_MAP, WASM_JS_FUNCTION_DATA_TYPE, WasmJSFunctionData::kSize,
795  wasm_js_function_data)
796  IF_WASM(ALLOCATE_MAP, WASM_RESUME_DATA_TYPE, WasmResumeData::kSize,
797  wasm_resume_data)
798  IF_WASM(ALLOCATE_MAP, WASM_SUSPENDER_OBJECT_TYPE,
799  WasmSuspenderObject::kSize, wasm_suspender_object)
800  IF_WASM(ALLOCATE_MAP, WASM_CONTINUATION_OBJECT_TYPE,
801  WasmContinuationObject::kSize, wasm_continuation_object)
802  IF_WASM(ALLOCATE_MAP, WASM_TYPE_INFO_TYPE, kVariableSizeSentinel,
803  wasm_type_info)
804  IF_WASM(ALLOCATE_MAP, WASM_NULL_TYPE, kVariableSizeSentinel, wasm_null);
805  IF_WASM(ALLOCATE_MAP, WASM_TRUSTED_INSTANCE_DATA_TYPE,
806  WasmTrustedInstanceData::kSize, wasm_trusted_instance_data);
807  IF_WASM(ALLOCATE_VARSIZE_MAP, WASM_DISPATCH_TABLE_TYPE,
808  wasm_dispatch_table);
809 
810  ALLOCATE_MAP(WEAK_CELL_TYPE, WeakCell::kSize, weak_cell)
811  ALLOCATE_MAP(INTERPRETER_DATA_TYPE, InterpreterData::kSize,
812  interpreter_data)
813  ALLOCATE_MAP(SHARED_FUNCTION_INFO_WRAPPER_TYPE,
814  SharedFunctionInfoWrapper::kSize, shared_function_info_wrapper)
815 
816  ALLOCATE_MAP(DICTIONARY_TEMPLATE_INFO_TYPE, DictionaryTemplateInfo::kSize,
817  dictionary_template_info)
818  }
819 
820  return true;
821 }
#define TORQUE_ALLOCATE_MAP(NAME, Name, name)
#define TORQUE_ALLOCATE_VARSIZE_MAP(NAME, Name, name)
#define IF_WASM(V,...)
Definition: macros.h:472

References ALLOCATE_MAP, ALLOCATE_VARSIZE_MAP, AllocateMap(), IF_WASM, v8::internal::anonymous_namespace{setup-heap-internal.cc}::is_important_struct(), v8::internal::kReadOnly, v8::internal::anonymous_namespace{setup-heap-internal.cc}::kStructTable, v8::internal::kVariableSizeSentinel, v8::internal::TaggedImpl< kRefType, StorageType >::ptr(), roots_table(), v8::internal::AllocationResult::To(), TORQUE_ALLOCATE_MAP, and TORQUE_ALLOCATE_VARSIZE_MAP.

Referenced by CreateReadOnlyHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateMutableApiObjects()

void v8::internal::Heap::CreateMutableApiObjects ( )

Definition at line 1388 of file setup-heap-internal.cc.

1388  {
1389  HandleScope scope(isolate());
1390  set_message_listeners(*ArrayList::New(isolate(), 2, AllocationType::kOld));
1391 }
static DirectHandle< ArrayList > New(IsolateT *isolate, int capacity, AllocationType allocation=AllocationType::kYoung)

References isolate(), v8::internal::kOld, and v8::internal::ArrayList::New().

Referenced by CreateMutableHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateMutableHeapObjects()

bool v8::internal::Heap::CreateMutableHeapObjects ( )

Definition at line 233 of file setup-heap-internal.cc.

233  {
234  ReadOnlyRoots roots(this);
235 
236  // Ensure that all young generation pages are iterable. It must be after heap
237  // setup, so that the maps have been created.
238  if (new_space()) new_space()->MakeIterable();
239 
241 
242  // Create initial objects
245  CHECK_EQ(0u, gc_count_);
246 
247  set_native_contexts_list(roots.undefined_value());
248  set_allocation_sites_list(roots.undefined_value());
249  set_dirty_js_finalization_registries_list(roots.undefined_value());
250  set_dirty_js_finalization_registries_list_tail(roots.undefined_value());
251 
252  return true;
253 }
void set_allocation_sites_list(Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext >> object)
Definition: heap.h:509
void set_dirty_js_finalization_registries_list_tail(Tagged< Object > object)
Definition: heap.h:524
void set_dirty_js_finalization_registries_list(Tagged< Object > object)
Definition: heap.h:518
unsigned int gc_count_
Definition: heap.h:2275
virtual void MakeIterable()=0
#define CHECK_EQ(lhs, rhs)

References CHECK_EQ, CreateInitialMutableObjects(), CreateInternalAccessorInfoObjects(), CreateMutableApiObjects(), gc_count_, v8::internal::NewSpace::MakeIterable(), new_space(), set_allocation_sites_list(), set_dirty_js_finalization_registries_list(), set_dirty_js_finalization_registries_list_tail(), and set_native_contexts_list().

+ Here is the call graph for this function:

◆ CreateObjectStats()

void v8::internal::Heap::CreateObjectStats ( )

Definition at line 7279 of file heap.cc.

7279  {
7281  if (!live_object_stats_) {
7282  live_object_stats_.reset(new ObjectStats(this));
7283  }
7284  if (!dead_object_stats_) {
7285  dead_object_stats_.reset(new ObjectStats(this));
7286  }
7287 }
std::unique_ptr< ObjectStats > live_object_stats_
Definition: heap.h:2341
std::unique_ptr< ObjectStats > dead_object_stats_
Definition: heap.h:2342
static bool is_gc_stats_enabled()
Definition: tracing-flags.h:36
#define V8_LIKELY(condition)
Definition: v8config.h:661

References dead_object_stats_, v8::internal::TracingFlags::is_gc_stats_enabled(), live_object_stats_, and V8_LIKELY.

+ Here is the call graph for this function:

◆ CreateReadOnlyApiObjects()

void v8::internal::Heap::CreateReadOnlyApiObjects ( )

Definition at line 1393 of file setup-heap-internal.cc.

1393  {
1394  HandleScope scope(isolate());
1395  auto info =
1397  set_noop_interceptor_info(*info);
1398  // Make sure read only heap layout does not depend on the size of
1399  // ExternalPointer fields.
1401  3 * kTaggedSize + 7 * kSystemPointerSize);
1402 }
DirectHandle< InterceptorInfo > NewInterceptorInfo(AllocationType allocation=AllocationType::kOld)
Definition: factory.cc:1543
void StaticRootsEnsureAllocatedSize(DirectHandle< HeapObject > obj, int required)

References v8::internal::Isolate::factory(), isolate(), v8::internal::kReadOnly, v8::internal::kSystemPointerSize, v8::internal::kTaggedSize, v8::internal::Factory::NewInterceptorInfo(), and StaticRootsEnsureAllocatedSize().

Referenced by CreateReadOnlyHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateReadOnlyHeapObjects()

bool v8::internal::Heap::CreateReadOnlyHeapObjects ( )

Definition at line 197 of file setup-heap-internal.cc.

197  {
198  // Create initial maps and important objects.
199  if (!CreateEarlyReadOnlyMapsAndObjects()) return false;
200  if (!CreateImportantReadOnlyObjects()) return false;
201 
202 #if V8_STATIC_ROOTS_BOOL
203  // The read only heap is sorted such that often used objects are allocated
204  // early for their compressed address to fit into 12bit arm immediates.
205  ReadOnlySpace* ro_space = isolate()->heap()->read_only_space();
206  DCHECK_LT(V8HeapCompressionScheme::CompressAny(ro_space->top()), 0xfff);
207  USE(ro_space);
208 #endif
209 
210  if (!CreateLateReadOnlyNonJSReceiverMaps()) return false;
211  if (!CreateReadOnlyObjects()) return false;
212 
213  // Order is important. JSReceiver maps must come after all non-JSReceiver maps
214  // in RO space with a sufficiently large gap in address. Currently there are
215  // no JSReceiver instances in RO space.
216  //
217  // See InstanceTypeChecker::kNonJsReceiverMapLimit.
218  if (!CreateLateReadOnlyJSReceiverMaps()) return false;
219 
221 
222 #ifdef DEBUG
223  ReadOnlyRoots roots(isolate());
224  for (auto pos = RootIndex::kFirstReadOnlyRoot;
225  pos <= RootIndex::kLastReadOnlyRoot; ++pos) {
226  DCHECK(roots.is_initialized(pos));
227  }
228  roots.VerifyTypes();
229 #endif
230  return true;
231 }
ReadOnlySpace * read_only_space() const
Definition: heap.h:781
bool CreateLateReadOnlyNonJSReceiverMaps()
static constexpr Tagged_t CompressAny(Address tagged)
#define USE(...)
Definition: macros.h:293

References v8::internal::V8HeapCompressionSchemeImpl< Cage >::CompressAny(), CreateEarlyReadOnlyMapsAndObjects(), CreateImportantReadOnlyObjects(), CreateLateReadOnlyJSReceiverMaps(), CreateLateReadOnlyNonJSReceiverMaps(), CreateReadOnlyApiObjects(), CreateReadOnlyObjects(), v8::internal::DCHECK(), DCHECK_LT, v8::internal::Isolate::heap(), v8::internal::ReadOnlyRoots::is_initialized(), isolate(), v8::internal::kFirstReadOnlyRoot, v8::internal::kLastReadOnlyRoot, read_only_space(), v8::internal::ReadOnlySpace::top(), and USE.

+ Here is the call graph for this function:

◆ CreateReadOnlyObjects()

bool v8::internal::Heap::CreateReadOnlyObjects ( )
private

Definition at line 1044 of file setup-heap-internal.cc.

1044  {
1045  HandleScope initial_objects_handle_scope(isolate());
1046  Factory* factory = isolate()->factory();
1047  ReadOnlyRoots roots(this);
1048  Tagged<HeapObject> obj;
1049 
1050  {
1051  AllocationResult alloc =
1053  if (!alloc.To(&obj)) return false;
1054  obj->set_map_after_allocation(isolate(), roots.array_list_map(),
1056  // Unchecked to skip failing checks since required roots are uninitialized.
1057  UncheckedCast<ArrayList>(obj)->set_capacity(0);
1058  UncheckedCast<ArrayList>(obj)->set_length(0);
1059  }
1060  set_empty_array_list(UncheckedCast<ArrayList>(obj));
1061 
1062  {
1063  AllocationResult alloc = AllocateRaw(
1065  if (!alloc.To(&obj)) return false;
1066  obj->set_map_after_allocation(isolate(),
1067  roots.object_boilerplate_description_map(),
1069 
1070  Cast<ObjectBoilerplateDescription>(obj)->set_capacity(0);
1071  Cast<ObjectBoilerplateDescription>(obj)->set_backing_store_size(0);
1072  Cast<ObjectBoilerplateDescription>(obj)->set_flags(0);
1073  }
1074  set_empty_object_boilerplate_description(
1075  Cast<ObjectBoilerplateDescription>(obj));
1076 
1077  {
1078  // Empty array boilerplate description
1079  AllocationResult alloc =
1080  Allocate(roots_table().array_boilerplate_description_map(),
1082  if (!alloc.To(&obj)) return false;
1083 
1084  Cast<ArrayBoilerplateDescription>(obj)->set_constant_elements(
1085  roots.empty_fixed_array());
1086  Cast<ArrayBoilerplateDescription>(obj)->set_elements_kind(
1088  }
1089  set_empty_array_boilerplate_description(
1090  Cast<ArrayBoilerplateDescription>(obj));
1091 
1092  // Empty arrays.
1093  {
1096  .To(&obj)) {
1097  return false;
1098  }
1099  obj->set_map_after_allocation(
1100  isolate(), roots.closure_feedback_cell_array_map(), SKIP_WRITE_BARRIER);
1101  Cast<ClosureFeedbackCellArray>(obj)->set_length(0);
1102  set_empty_closure_feedback_cell_array(Cast<ClosureFeedbackCellArray>(obj));
1103  }
1104 
1105  DCHECK(!HeapLayout::InYoungGeneration(roots.empty_fixed_array()));
1106 
1107  // Allocate the empty SwissNameDictionary
1108  DirectHandle<SwissNameDictionary> empty_swiss_property_dictionary =
1109  factory->CreateCanonicalEmptySwissNameDictionary();
1110  set_empty_swiss_property_dictionary(*empty_swiss_property_dictionary);
1111  StaticRootsEnsureAllocatedSize(empty_swiss_property_dictionary,
1112  8 * kTaggedSize);
1113 
1114  roots.bigint_map()->SetConstructorFunctionIndex(
1115  Context::BIGINT_FUNCTION_INDEX);
1116 
1117  for (const ConstantStringInit& entry : kNotImportantConstantStringTable) {
1118  DirectHandle<String> str = factory->InternalizeString(entry.contents);
1119  roots_table()[entry.index] = str->ptr();
1120  }
1121 
1122 #define ENSURE_SINGLE_CHAR_STRINGS_ARE_SINGLE_CHAR(_, name, contents) \
1123  static_assert(arraysize(contents) - 1 == 1);
1126  /* not used */)
1127 #undef ENSURE_SINGLE_CHAR_STRINGS_ARE_SINGLE_CHAR
1128 
1129  // Finish initializing oddballs after creating the string table.
1130  Oddball::Initialize(isolate(), factory->undefined_value(), "undefined",
1131  factory->undefined_nan_value(), "undefined",
1133 
1134  // Initialize the null_value.
1135  Oddball::Initialize(isolate(), factory->null_value(), "null",
1136  direct_handle(Smi::zero(), isolate()), "object",
1137  Oddball::kNull);
1138 
1139  // Initialize the true_value.
1140  Oddball::Initialize(isolate(), factory->true_value(), "true",
1141  direct_handle(Smi::FromInt(1), isolate()), "boolean",
1142  Oddball::kTrue);
1143 
1144  // Initialize the false_value.
1145  Oddball::Initialize(isolate(), factory->false_value(), "false",
1146  direct_handle(Smi::zero(), isolate()), "boolean",
1147  Oddball::kFalse);
1148 
1149  // Initialize the_hole_value.
1150  Hole::Initialize(isolate(), factory->the_hole_value(),
1151  factory->hole_nan_value());
1152 
1153  set_property_cell_hole_value(*factory->NewHole());
1154  set_hash_table_hole_value(*factory->NewHole());
1155  set_promise_hole_value(*factory->NewHole());
1156  set_uninitialized_value(*factory->NewHole());
1157  set_arguments_marker(*factory->NewHole());
1158  set_termination_exception(*factory->NewHole());
1159  set_exception(*factory->NewHole());
1160  set_optimized_out(*factory->NewHole());
1161  set_stale_register(*factory->NewHole());
1162 
1163  // Initialize marker objects used during compilation.
1164  set_self_reference_marker(*factory->NewHole());
1165  set_basic_block_counters_marker(*factory->NewHole());
1166 
1167  {
1168  HandleScope handle_scope(isolate());
1170 #undef SYMBOL_INIT
1171  }
1172 
1173  {
1174  HandleScope handle_scope(isolate());
1175 #define PUBLIC_SYMBOL_INIT(_, name, description) \
1176  DirectHandle<Symbol> name = factory->NewSymbol(AllocationType::kReadOnly); \
1177  DirectHandle<String> name##d = factory->InternalizeUtf8String(#description); \
1178  name->set_description(*name##d); \
1179  roots_table()[RootIndex::k##name] = name->ptr();
1180 
1182 
1183 #define WELL_KNOWN_SYMBOL_INIT(_, name, description) \
1184  DirectHandle<Symbol> name = factory->NewSymbol(AllocationType::kReadOnly); \
1185  DirectHandle<String> name##d = factory->InternalizeUtf8String(#description); \
1186  name->set_is_well_known_symbol(true); \
1187  name->set_description(*name##d); \
1188  roots_table()[RootIndex::k##name] = name->ptr();
1189 
1191 
1192  // Mark "Interesting Symbols" appropriately.
1193  to_string_tag_symbol->set_is_interesting_symbol(true);
1194  }
1195 
1196  {
1197  // All Names that can cause protector invalidation have to be allocated
1198  // consecutively to allow for fast checks
1199 
1200  // Allocate the symbols's internal strings first, so we don't get
1201  // interleaved string allocations for the symbols later.
1202 #define ALLOCATE_SYMBOL_STRING(_, name, description) \
1203  Handle<String> name##symbol_string = \
1204  factory->InternalizeUtf8String(#description); \
1205  USE(name##symbol_string);
1206 
1208  /* not used */)
1210  /* not used */)
1212  /* not used */)
1213 #undef ALLOCATE_SYMBOL_STRING
1214 
1215 #define INTERNALIZED_STRING_INIT(_, name, description) \
1216  DirectHandle<String> name = factory->InternalizeUtf8String(description); \
1217  roots_table()[RootIndex::k##name] = name->ptr();
1218 
1220  /* not used */)
1222  /* not used */)
1224  /* not used */)
1226  /* not used */)
1227 
1228  // Mark "Interesting Symbols" appropriately.
1229  to_primitive_symbol->set_is_interesting_symbol(true);
1230 
1231 #ifdef DEBUG
1232  roots.VerifyNameForProtectors();
1233 #endif
1234  roots.VerifyNameForProtectorsPages();
1235 
1236 #undef INTERNALIZED_STRING_INIT
1237 #undef PUBLIC_SYMBOL_INIT
1238 #undef WELL_KNOWN_SYMBOL_INIT
1239  }
1240 
1241  DirectHandle<NumberDictionary> slow_element_dictionary =
1244  DCHECK(!slow_element_dictionary->HasSufficientCapacityToAdd(1));
1245  set_empty_slow_element_dictionary(*slow_element_dictionary);
1246 
1247  DirectHandle<RegisteredSymbolTable> empty_symbol_table =
1250  DCHECK(!empty_symbol_table->HasSufficientCapacityToAdd(1));
1251  set_empty_symbol_table(*empty_symbol_table);
1252 
1253  set_undefined_context_cell(*factory->NewContextCell(
1254  factory->undefined_value(), AllocationType::kReadOnly));
1255 
1256  // Allocate the empty OrderedHashMap.
1257  DirectHandle<OrderedHashMap> empty_ordered_hash_map =
1259  .ToHandleChecked();
1260  set_empty_ordered_hash_map(*empty_ordered_hash_map);
1261 
1262  // Allocate the empty OrderedHashSet.
1263  DirectHandle<OrderedHashSet> empty_ordered_hash_set =
1265  .ToHandleChecked();
1266  set_empty_ordered_hash_set(*empty_ordered_hash_set);
1267 
1268  // Allocate the empty FeedbackMetadata.
1269  DirectHandle<FeedbackMetadata> empty_feedback_metadata =
1270  factory->NewFeedbackMetadata(0, 0, AllocationType::kReadOnly);
1271  set_empty_feedback_metadata(*empty_feedback_metadata);
1272 
1273  // Canonical scope arrays.
1274  DirectHandle<ScopeInfo> global_this_binding =
1276  set_global_this_binding_scope_info(*global_this_binding);
1277 
1278  DirectHandle<ScopeInfo> empty_function =
1280  set_empty_function_scope_info(*empty_function);
1281 
1282  DirectHandle<ScopeInfo> native_scope_info =
1284  set_native_scope_info(*native_scope_info);
1285 
1286  DirectHandle<ScopeInfo> shadow_realm_scope_info =
1288  set_shadow_realm_scope_info(*shadow_realm_scope_info);
1289 
1290  // Allocate FeedbackCell for builtins.
1291  DirectHandle<FeedbackCell> many_closures_cell =
1292  factory->NewManyClosuresCell(AllocationType::kReadOnly);
1293  set_many_closures_cell(*many_closures_cell);
1294 
1295  // Allocate and initialize table for preallocated number strings.
1296  {
1297  HandleScope handle_scope(isolate());
1298  Handle<FixedArray> preallocated_number_string_table =
1299  factory->NewFixedArray(kPreallocatedNumberStringTableSize,
1301 
1302  char arr[16];
1303  base::Vector<char> buffer(arr, arraysize(arr));
1304 
1305  static_assert(kPreallocatedNumberStringTableSize >= 10);
1306  for (int i = 0; i < 10; ++i) {
1308  Tagged<String> str =
1309  Cast<String>(factory->read_only_roots().object_at(root_index));
1311  preallocated_number_string_table->set(i, str);
1312  }
1313 
1314  // This code duplicates FactoryBase::SmiToNumber.
1315  for (int i = 10; i < kPreallocatedNumberStringTableSize; ++i) {
1316  std::string_view string = IntToStringView(i, buffer);
1317  Handle<String> str = factory->InternalizeString(
1318  base::OneByteVector(string.data(), string.length()));
1319 
1321  preallocated_number_string_table->set(i, *str);
1322  }
1323  set_preallocated_number_string_table(*preallocated_number_string_table);
1324  }
1325  // Initialize the wasm null_value.
1326 
1327 #ifdef V8_ENABLE_WEBASSEMBLY
1328  // Allocate the wasm-null object. It is a regular V8 heap object contained in
1329  // a V8 page.
1330  // In static-roots builds, it is large enough so that its payload (other than
1331  // its map word) can be mprotected on OS page granularity. We adjust the
1332  // layout such that we have a filler object in the current OS page, and the
1333  // wasm-null map word at the end of the current OS page. The payload then is
1334  // contained on a separate OS page which can be protected.
1335  // In non-static-roots builds, it is a regular object of size {kTaggedSize}
1336  // and does not need padding.
1337 
1338  constexpr size_t kLargestPossibleOSPageSize = 64 * KB;
1339  static_assert(kLargestPossibleOSPageSize >= kMinimumOSPageSize);
1340 
1342  // Ensure all of the following lands on the same V8 page.
1343  constexpr int kOffsetAfterMapWord = HeapObject::kMapOffset + kTaggedSize;
1344  static_assert(kOffsetAfterMapWord % kObjectAlignment == 0);
1346  kLargestPossibleOSPageSize + WasmNull::kSize - kOffsetAfterMapWord);
1347  Address next_page = RoundUp(read_only_space_->top() + kOffsetAfterMapWord,
1348  kLargestPossibleOSPageSize);
1349 
1350  // Add some filler to end up right before an OS page boundary.
1351  int filler_size = static_cast<int>(next_page - read_only_space_->top() -
1352  kOffsetAfterMapWord);
1353  // TODO(v8:7748) Depending on where we end up this might actually not hold,
1354  // in which case we would need to use a one or two-word filler.
1355  CHECK(filler_size > 2 * kTaggedSize);
1356  Tagged<HeapObject> filler =
1360  CreateFillerObjectAt(filler.address(), filler_size,
1362  set_wasm_null_padding(filler);
1363  CHECK_EQ(read_only_space_->top() + kOffsetAfterMapWord, next_page);
1364  } else {
1365  set_wasm_null_padding(roots.undefined_value());
1366  }
1367 
1368  // Finally, allocate the wasm-null object.
1369  {
1370  Tagged<HeapObject> wasm_null_obj;
1372  .To(&wasm_null_obj));
1373  // No need to initialize the payload since it's either empty or unmapped.
1375  WasmNull::kSize == sizeof(Tagged_t));
1376  wasm_null_obj->set_map_after_allocation(isolate(), roots.wasm_null_map(),
1378  set_wasm_null(Cast<WasmNull>(wasm_null_obj));
1380  CHECK_EQ(read_only_space_->top() % kLargestPossibleOSPageSize, 0);
1381  }
1382  }
1383 #endif
1384 
1385  return true;
1386 }
constexpr int kMinimumOSPageSize
Definition: build_config.h:104
static constexpr int kMapOffset
Definition: heap-object.h:498
V8_EXPORT_PRIVATE void CreateFillerObjectAt(Address addr, int size, ClearFreedMemoryMode clear_memory_mode=ClearFreedMemoryMode::kDontClearFreedMemory)
Definition: heap.cc:3348
ReadOnlySpace * read_only_space_
Definition: heap.h:2215
static void Initialize(Isolate *isolate, DirectHandle< Hole > hole, DirectHandle< HeapNumber > numeric_value)
Definition: hole-inl.h:31
static void Initialize(Isolate *isolate, DirectHandle< Oddball > oddball, const char *to_string, DirectHandle< Number > to_number, const char *type_of, uint8_t kind)
Definition: objects.cc:4312
static MaybeHandle< OrderedHashMap > AllocateEmpty(Isolate *isolate, AllocationType allocation=AllocationType::kReadOnly)
static MaybeHandle< OrderedHashSet > AllocateEmpty(Isolate *isolate, AllocationType allocation=AllocationType::kReadOnly)
void EnsureSpaceForAllocation(int size_in_bytes)
static constexpr RootIndex SingleCharacterStringIndex(int c)
Definition: roots.h:644
static DirectHandle< ScopeInfo > CreateGlobalThisBinding(Isolate *isolate)
Definition: scope-info.cc:519
static DirectHandle< ScopeInfo > CreateForShadowRealmNativeContext(Isolate *isolate)
Definition: scope-info.cc:544
static V8_EXPORT_PRIVATE DirectHandle< ScopeInfo > CreateForEmptyFunction(Isolate *isolate)
Definition: scope-info.cc:524
static DirectHandle< ScopeInfo > CreateForNativeContext(Isolate *isolate)
Definition: scope-info.cc:539
static constexpr int kSize
#define V8_STATIC_ROOTS_GENERATION_BOOL
Definition: globals.h:143
#define WELL_KNOWN_SYMBOL_LIST_GENERATOR(V, _)
Definition: heap-symbols.h:945
#define INTERNALIZED_STRING_FOR_PROTECTOR_LIST_GENERATOR(V, _)
Definition: heap-symbols.h:951
#define PUBLIC_SYMBOL_LIST_GENERATOR(V, _)
Definition: heap-symbols.h:932
#define WELL_KNOWN_SYMBOL_FOR_PROTECTOR_LIST_GENERATOR(V, _)
Definition: heap-symbols.h:971
#define SYMBOL_FOR_PROTECTOR_LIST_GENERATOR(V, _)
Definition: heap-symbols.h:961
#define PUBLIC_SYMBOL_FOR_PROTECTOR_LIST_GENERATOR(V, _)
Definition: heap-symbols.h:968
#define SINGLE_CHARACTER_INTERNALIZED_STRING_LIST_GENERATOR(V_, _)
Definition: heap-symbols.h:436
#define NOT_IMPORTANT_PRIVATE_SYMBOL_LIST_GENERATOR(V, _)
Definition: heap-symbols.h:894
Vector< const uint8_t > OneByteVector(const char *data, size_t length)
Definition: vector.h:336
MaybeDirectHandle< T > New(Isolate *isolate, DirectHandle< JSFunction > constructor, DirectHandle< Object > locales, DirectHandle< Object > options, const char *method_name)
constexpr std::initializer_list< ConstantStringInit > kNotImportantConstantStringTable
constexpr intptr_t kObjectAlignment
Definition: globals.h:924
Address Tagged_t
Definition: globals.h:538
std::string_view IntToStringView(int n, base::Vector< char > buffer)
DirectHandle< T > direct_handle(Tagged< T > object, Isolate *isolate)
Definition: handles-inl.h:143
kInstanceDescriptorsOffset kTransitionsOrPrototypeInfoOffset true
Definition: map-inl.h:69
constexpr int KB
Definition: v8-internal.h:55
@ kTaggedAligned
Definition: globals.h:1566
constexpr int kPreallocatedNumberStringTableSize
Definition: globals.h:2918
#define PUBLIC_SYMBOL_INIT(_, name, description)
#define INTERNALIZED_STRING_INIT(_, name, description)
#define WELL_KNOWN_SYMBOL_INIT(_, name, description)
#define ENSURE_SINGLE_CHAR_STRINGS_ARE_SINGLE_CHAR(_, name, contents)
#define ALLOCATE_SYMBOL_STRING(_, name, description)
#define arraysize(array)
Definition: macros.h:67

References v8::internal::Tagged< HeapObject >::address(), Allocate(), ALLOCATE_SYMBOL_STRING, v8::internal::OrderedHashSet::AllocateEmpty(), v8::internal::OrderedHashMap::AllocateEmpty(), AllocateRaw(), v8::internal::HeapAllocator::AllocateRawWith(), allocator(), arraysize, CHECK, CHECK_EQ, CHECK_IMPLIES, v8::internal::ReadOnlyHeap::Contains(), v8::internal::Factory::CreateCanonicalEmptySwissNameDictionary(), CreateFillerObjectAt(), v8::internal::ScopeInfo::CreateForEmptyFunction(), v8::internal::ScopeInfo::CreateForNativeContext(), v8::internal::ScopeInfo::CreateForShadowRealmNativeContext(), v8::internal::ScopeInfo::CreateGlobalThisBinding(), v8::internal::DCHECK(), v8::internal::direct_handle(), ENSURE_SINGLE_CHAR_STRINGS_ARE_SINGLE_CHAR, v8::internal::ReadOnlySpace::EnsureSpaceForAllocation(), v8::internal::Isolate::factory(), v8::internal::Smi::FromInt(), v8::internal::anonymous_namespace{json-stringifier.cc}::i, v8::internal::Hole::Initialize(), v8::internal::Oddball::Initialize(), INTERNALIZED_STRING_FOR_PROTECTOR_LIST_GENERATOR, INTERNALIZED_STRING_INIT, v8::internal::Factory::InternalizeString(), v8::internal::IntToStringView(), v8::internal::HeapLayout::InYoungGeneration(), isolate(), v8::internal::KB, v8::internal::kClearFreedMemory, v8::internal::Oddball::kFalse, v8::internal::HeapObject::kMapOffset, kMinimumOSPageSize, v8::internal::anonymous_namespace{setup-heap-internal.cc}::kNotImportantConstantStringTable, v8::internal::Oddball::kNull, v8::internal::kObjectAlignment, v8::internal::kPreallocatedNumberStringTableSize, v8::internal::kReadOnly, v8::internal::HeapAllocator::kRetryOrFail, v8::internal::kRuntime, v8::internal::WasmNull::kSize, v8::internal::kTaggedAligned, v8::internal::kTaggedSize, v8::internal::Oddball::kTrue, v8::internal::Oddball::kUndefined, v8::internal::length, v8::internal::anonymous_namespace{intl-objects.cc}::New(), v8::internal::Factory::NewContextCell(), v8::internal::FactoryBase< Impl >::NewFeedbackMetadata(), 
v8::internal::FactoryBase< Impl >::NewFixedArray(), v8::internal::Factory::NewHole(), v8::internal::Factory::NewManyClosuresCell(), NOT_IMPORTANT_PRIVATE_SYMBOL_LIST_GENERATOR, v8::internal::ReadOnlyRoots::object_at(), v8::base::OneByteVector(), v8::internal::PACKED_SMI_ELEMENTS, PUBLIC_SYMBOL_FOR_PROTECTOR_LIST_GENERATOR, PUBLIC_SYMBOL_INIT, PUBLIC_SYMBOL_LIST_GENERATOR, v8::internal::Factory::read_only_roots(), read_only_space_, roots_table(), RoundUp(), SINGLE_CHARACTER_INTERNALIZED_STRING_LIST_GENERATOR, v8::internal::RootsTable::SingleCharacterStringIndex(), v8::internal::TaggedArrayBase< ArrayList, ArrayListShape >::SizeFor(), v8::internal::TaggedArrayBase< ObjectBoilerplateDescription, ObjectBoilerplateDescriptionShape >::SizeFor(), v8::internal::TaggedArrayBase< ClosureFeedbackCellArray, ClosureFeedbackCellArrayShape >::SizeFor(), v8::internal::SKIP_WRITE_BARRIER, StaticRootsEnsureAllocatedSize(), SYMBOL_FOR_PROTECTOR_LIST_GENERATOR, SYMBOL_INIT, v8::internal::AllocationResult::To(), v8::internal::ReadOnlySpace::top(), v8::internal::USE_CUSTOM_MINIMUM_CAPACITY, V8_STATIC_ROOTS_BOOL, V8_STATIC_ROOTS_GENERATION_BOOL, v8::internal::ReadOnlyRoots::VerifyNameForProtectorsPages(), WELL_KNOWN_SYMBOL_FOR_PROTECTOR_LIST_GENERATOR, WELL_KNOWN_SYMBOL_INIT, WELL_KNOWN_SYMBOL_LIST_GENERATOR, and v8::internal::Smi::zero().

Referenced by CreateReadOnlyHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ current_or_last_garbage_collector()

GarbageCollector v8::internal::Heap::current_or_last_garbage_collector ( ) const
inline

Definition at line 1414 of file heap.h.

1414  {
1416  }

◆ CurrentHeapGrowingMode()

Heap::HeapGrowingMode v8::internal::Heap::CurrentHeapGrowingMode ( )
private

Definition at line 5578 of file heap.cc.

5578  {
5579  if (ShouldReduceMemory() || v8_flags.stress_compaction) {
5581  }
5582 
5585  }
5586 
5587  if (memory_reducer() != nullptr && memory_reducer()->ShouldGrowHeapSlowly()) {
5589  }
5590 
5592 }
MemoryReducer * memory_reducer()
Definition: heap.h:1971
V8_EXPORT_PRIVATE bool ShouldOptimizeForMemoryUsage()
Definition: heap.cc:3898

References kConservative, kDefault, kMinimal, kSlow, memory_reducer(), ShouldOptimizeForMemoryUsage(), ShouldReduceMemory(), and v8::internal::v8_flags.

Referenced by ShrinkOldGenerationAllocationLimitIfNotConfigured().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeactivateMajorGCInProgressFlag()

void v8::internal::Heap::DeactivateMajorGCInProgressFlag ( )

Definition at line 3756 of file heap.cc.

3756  {
3757  DCHECK(v8_flags.sticky_mark_bits);
3759 
3760  auto deactivate_space = [](auto& space) {
3761  for (auto* metadata : space) {
3762  metadata->Chunk()->ResetMajorGCInProgress();
3763  }
3764  };
3765 
3766  deactivate_space(*old_space());
3767  deactivate_space(*lo_space());
3768 
3769  {
3770  RwxMemoryWriteScope scope("For writing flags.");
3771  deactivate_space(*code_space());
3772  deactivate_space(*code_lo_space());
3773  }
3774 
3775  if (isolate()->is_shared_space_isolate()) {
3776  deactivate_space(*shared_space());
3777  deactivate_space(*shared_lo_space());
3778  }
3779 
3780  deactivate_space(*trusted_space());
3781  deactivate_space(*trusted_lo_space());
3782 }
SharedSpace * shared_space() const
Definition: heap.h:776
TrustedLargeObjectSpace * trusted_lo_space() const
Definition: heap.h:786
TrustedSpace * trusted_space() const
Definition: heap.h:782
CodeSpace * code_space() const
Definition: heap.h:775
CodeLargeObjectSpace * code_lo_space() const
Definition: heap.h:778
OldSpace * old_space() const
Definition: heap.h:773
SharedLargeObjectSpace * shared_lo_space() const
Definition: heap.h:779

References code_lo_space(), code_space(), v8::internal::DCHECK(), DCHECK_NULL, isolate(), lo_space(), new_space(), old_space(), shared_lo_space(), shared_space(), space(), trusted_lo_space(), trusted_space(), and v8::internal::v8_flags.

+ Here is the call graph for this function:

◆ DecrementExternalBackingStoreBytes()

void v8::internal::Heap::DecrementExternalBackingStoreBytes ( ExternalBackingStoreType  type,
size_t  amount 
)
inlineprivate

Definition at line 409 of file heap-inl.h.

410  {
411  base::CheckedDecrement(&backing_store_bytes_, static_cast<uint64_t>(amount),
412  std::memory_order_relaxed);
413 }

References backing_store_bytes_.

Referenced by v8::internal::Space::DecrementExternalBackingStoreBytes(), and v8::internal::ArrayBufferSweeper::DecrementExternalMemoryCounters().

+ Here is the caller graph for this function:

◆ DefaulMaxHeapSize()

size_t v8::internal::Heap::DefaulMaxHeapSize ( )
static

Definition at line 5089 of file heap.cc.

5089 { return 1024u * HeapLimitMultiplier() * MB; }
static V8_EXPORT_PRIVATE size_t HeapLimitMultiplier()
Definition: heap.cc:5031

References HeapLimitMultiplier(), and v8::internal::MB.

Referenced by v8::internal::MemoryController< Trait >::MaxGrowingFactor(), MaxOldGenerationSizeFromPhysicalMemory(), and OldGenerationToSemiSpaceRatio().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DefaulMinHeapSize()

size_t v8::internal::Heap::DefaulMinHeapSize ( )
static

Definition at line 5086 of file heap.cc.

5086 { return 128u * HeapLimitMultiplier() * MB; }

References HeapLimitMultiplier(), and v8::internal::MB.

Referenced by HeapSizeFromPhysicalMemory(), and v8::internal::MemoryController< Trait >::MaxGrowingFactor().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DefaultInitialOldGenerationSize()

size_t v8::internal::Heap::DefaultInitialOldGenerationSize ( )
static

Definition at line 5047 of file heap.cc.

5047  {
5048  return 256 * MB * HeapLimitMultiplier();
5049 }

References HeapLimitMultiplier(), and v8::internal::MB.

Referenced by ConfigureHeap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DefaultMaxSemiSpaceSize()

size_t v8::internal::Heap::DefaultMaxSemiSpaceSize ( )
static

Definition at line 5062 of file heap.cc.

5062  {
5063  if (v8_flags.minor_ms) {
5064  static constexpr size_t kMinorMsMaxCapacity = 72 * kPointerMultiplier * MB;
5065  return RoundUp(kMinorMsMaxCapacity, PageMetadata::kPageSize);
5066  }
5067 
5068  // Compute default max semi space size for Scavenger.
5069  static constexpr size_t kScavengerDefaultMaxCapacity =
5070  32 * kPointerMultiplier * MB;
5071  size_t max_semi_space_size = kScavengerDefaultMaxCapacity;
5072 
5073 #if defined(ANDROID)
5074  if (!v8_flags.high_end_android) {
5075  // Note that kPointerMultiplier is always 1 on Android.
5076  static constexpr size_t kAndroidNonHighEndMaxCapacity =
5077  8 * kPointerMultiplier * MB;
5078  max_semi_space_size = kAndroidNonHighEndMaxCapacity;
5079  }
5080 #endif
5081 
5082  return RoundUp(max_semi_space_size, PageMetadata::kPageSize);
5083 }
static const int kPointerMultiplier
Definition: heap.h:315

References v8::internal::MutablePageMetadata::kPageSize, kPointerMultiplier, v8::internal::MB, RoundUp(), and v8::internal::v8_flags.

Referenced by AllocateExternalBackingStore(), AllocatorLimitOnMaxOldGenerationSize(), ConfigureHeap(), OldGenerationToSemiSpaceRatio(), and YoungGenerationSizeFromOldGenerationSize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DefaultMinSemiSpaceSize()

size_t v8::internal::Heap::DefaultMinSemiSpaceSize ( )
static

Definition at line 5057 of file heap.cc.

5057  {
5059 }

References v8::internal::KB, v8::internal::MutablePageMetadata::kPageSize, kPointerMultiplier, and RoundUp().

Referenced by ConfigureHeap(), MinYoungGenerationSize(), and YoungGenerationSizeFromOldGenerationSize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeoptMarkedAllocationSites()

void v8::internal::Heap::DeoptMarkedAllocationSites ( )

Definition at line 1044 of file heap.cc.

1044  {
1045  // TODO(hpayer): If iterating over the allocation sites list becomes a
1046  // performance issue, use a cache data structure in heap instead.
1047 
1049  allocation_sites_list(), [this](Tagged<AllocationSite> site) {
1050  if (site->deopt_dependent_code()) {
1051  DependentCode::MarkCodeForDeoptimization(
1052  isolate_, site,
1053  DependentCode::kAllocationSiteTenuringChangedGroup);
1054  site->set_deopt_dependent_code(false);
1055  }
1056  });
1057 
1059 }
static void DeoptimizeMarkedCode(Isolate *isolate)
Definition: deoptimizer.cc:405
void ForeachAllocationSite(Tagged< Object > list, const std::function< void(Tagged< AllocationSite >)> &visitor)
Definition: heap.cc:3057
Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext > > allocation_sites_list()
Definition: heap.h:514

References allocation_sites_list(), v8::internal::Deoptimizer::DeoptimizeMarkedCode(), ForeachAllocationSite(), and isolate_.

Referenced by v8::internal::StackGuard::HandleInterrupts().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DequeueDirtyJSFinalizationRegistry()

MaybeDirectHandle< JSFinalizationRegistry > v8::internal::Heap::DequeueDirtyJSFinalizationRegistry ( )

Definition at line 7080 of file heap.cc.

7080  {
7081  // Take a FinalizationRegistry from the head of the dirty list for fairness.
7083  DirectHandle<JSFinalizationRegistry> head(
7084  Cast<JSFinalizationRegistry>(dirty_js_finalization_registries_list()),
7085  isolate());
7086  set_dirty_js_finalization_registries_list(head->next_dirty());
7087  head->set_next_dirty(ReadOnlyRoots(this).undefined_value());
7090  ReadOnlyRoots(this).undefined_value());
7091  }
7092  return head;
7093  }
7094  return {};
7095 }
bool HasDirtyJSFinalizationRegistries()
Definition: heap.cc:7035
Tagged< Object > dirty_js_finalization_registries_list()
Definition: heap.h:521
Tagged< Object > dirty_js_finalization_registries_list_tail()
Definition: heap.h:527

References dirty_js_finalization_registries_list(), dirty_js_finalization_registries_list_tail(), HasDirtyJSFinalizationRegistries(), isolate(), ReadOnlyRoots, set_dirty_js_finalization_registries_list(), and set_dirty_js_finalization_registries_list_tail().

Referenced by v8::internal::FinalizationRegistryCleanupTask::RunInternal().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ deserialization_complete()

bool v8::internal::Heap::deserialization_complete ( ) const
inline

◆ DetachArrayBufferExtension()

void v8::internal::Heap::DetachArrayBufferExtension ( ArrayBufferExtension extension)

Definition at line 4395 of file heap.cc.

4395  {
4396  // ArrayBufferSweeper is managing all counters and updating Heap counters.
4397  return array_buffer_sweeper_->Detach(extension);
4398 }

References array_buffer_sweeper_.

Referenced by v8::internal::JSArrayBuffer::DetachInternal().

+ Here is the caller graph for this function:

◆ dirty_js_finalization_registries_list()

Tagged<Object> v8::internal::Heap::dirty_js_finalization_registries_list ( )
inline

Definition at line 521 of file heap.h.

521  {
523  }
Tagged< Object > dirty_js_finalization_registries_list_
Definition: heap.h:2302

Referenced by v8::internal::StartupSerializer::CheckNoDirtyFinalizationRegistries(), DequeueDirtyJSFinalizationRegistry(), EnqueueDirtyJSFinalizationRegistry(), HasDirtyJSFinalizationRegistries(), ProcessDirtyJSFinalizationRegistries(), ProcessWeakListRoots(), and RemoveDirtyFinalizationRegistriesOnContext().

+ Here is the caller graph for this function:

◆ dirty_js_finalization_registries_list_tail()

Tagged<Object> v8::internal::Heap::dirty_js_finalization_registries_list_tail ( )
inline

Definition at line 527 of file heap.h.

527  {
529  }
Tagged< Object > dirty_js_finalization_registries_list_tail_
Definition: heap.h:2304

Referenced by v8::internal::StartupSerializer::CheckNoDirtyFinalizationRegistries(), DequeueDirtyJSFinalizationRegistry(), EnqueueDirtyJSFinalizationRegistry(), and ProcessWeakListRoots().

+ Here is the caller graph for this function:

◆ DisableInlineAllocation()

void v8::internal::Heap::DisableInlineAllocation ( )

Definition at line 5738 of file heap.cc.

5738  {
5741 }
bool inline_allocation_enabled_
Definition: heap.h:2443

References FreeMainThreadLinearAllocationAreas(), and inline_allocation_enabled_.

Referenced by AddHeapObjectAllocationTracker(), and v8::internal::Isolate::Init().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DumpJSONHeapStatistics()

void v8::internal::Heap::DumpJSONHeapStatistics ( std::stringstream &  stream)

Definition at line 753 of file heap.cc.

753  {
754  HeapStatistics stats;
755  reinterpret_cast<v8::Isolate*>(isolate())->GetHeapStatistics(&stats);
756 
757 // clang-format off
758 #define DICT(s) "{" << s << "}"
759 #define LIST(s) "[" << s << "]"
760 #define QUOTE(s) "\"" << s << "\""
761 #define MEMBER(s) QUOTE(s) << ":"
762 
763  auto SpaceStatistics = [this](int space_index) {
764  HeapSpaceStatistics space_stats;
765  reinterpret_cast<v8::Isolate*>(isolate())->GetHeapSpaceStatistics(
766  &space_stats, space_index);
767  std::stringstream stream;
768  stream << DICT(
769  MEMBER("name")
770  << QUOTE(ToString(
771  static_cast<AllocationSpace>(space_index)))
772  << ","
773  MEMBER("size") << space_stats.space_size() << ","
774  MEMBER("used_size") << space_stats.space_used_size() << ","
775  MEMBER("available_size") << space_stats.space_available_size() << ","
776  MEMBER("physical_size") << space_stats.physical_space_size());
777  return stream.str();
778  };
779 
780  stream << DICT(
781  MEMBER("isolate") << QUOTE(reinterpret_cast<void*>(isolate())) << ","
782  MEMBER("id") << gc_count() << ","
783  MEMBER("time_ms") << isolate()->time_millis_since_init() << ","
784  MEMBER("total_heap_size") << stats.total_heap_size() << ","
785  MEMBER("total_heap_size_executable")
786  << stats.total_heap_size_executable() << ","
787  MEMBER("total_physical_size") << stats.total_physical_size() << ","
788  MEMBER("total_available_size") << stats.total_available_size() << ","
789  MEMBER("used_heap_size") << stats.used_heap_size() << ","
790  MEMBER("heap_size_limit") << stats.heap_size_limit() << ","
791  MEMBER("malloced_memory") << stats.malloced_memory() << ","
792  MEMBER("external_memory") << stats.external_memory() << ","
793  MEMBER("peak_malloced_memory") << stats.peak_malloced_memory() << ","
794  MEMBER("spaces") << LIST(
795  SpaceStatistics(RO_SPACE) << "," <<
796  SpaceStatistics(NEW_SPACE) << "," <<
797  SpaceStatistics(OLD_SPACE) << "," <<
798  SpaceStatistics(CODE_SPACE) << "," <<
799  SpaceStatistics(LO_SPACE) << "," <<
800  SpaceStatistics(CODE_LO_SPACE) << "," <<
801  SpaceStatistics(NEW_LO_SPACE) << "," <<
802  SpaceStatistics(TRUSTED_SPACE) << "," <<
803  SpaceStatistics(TRUSTED_LO_SPACE)));
804 
805 #undef DICT
806 #undef LIST
807 #undef QUOTE
808 #undef MEMBER
809  // clang-format on
810 }
int gc_count() const
Definition: heap.h:1410
#define MEMBER(s)
#define DICT(s)
#define LIST(s)
#define QUOTE(s)

References v8::internal::CODE_LO_SPACE, v8::internal::CODE_SPACE, DICT, v8::HeapStatistics::external_memory(), gc_count(), v8::HeapStatistics::heap_size_limit(), isolate(), LIST, v8::internal::LO_SPACE, v8::HeapStatistics::malloced_memory(), MEMBER, v8::internal::NEW_LO_SPACE, v8::internal::NEW_SPACE, v8::internal::OLD_SPACE, v8::HeapStatistics::peak_malloced_memory(), v8::HeapSpaceStatistics::physical_space_size(), QUOTE, v8::internal::RO_SPACE, v8::HeapSpaceStatistics::space_available_size(), v8::HeapSpaceStatistics::space_size(), v8::HeapSpaceStatistics::space_used_size(), v8::internal::ToString(), v8::HeapStatistics::total_available_size(), v8::HeapStatistics::total_heap_size(), v8::HeapStatistics::total_heap_size_executable(), v8::HeapStatistics::total_physical_size(), v8::internal::TRUSTED_LO_SPACE, v8::internal::TRUSTED_SPACE, and v8::HeapStatistics::used_heap_size().

Referenced by v8::internal::GCTracer::StopObservablePause().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EagerlyFreeExternalMemoryAndWasmCode()

void v8::internal::Heap::EagerlyFreeExternalMemoryAndWasmCode ( )
private

Definition at line 4351 of file heap.cc.

4351  {
4352 #if V8_ENABLE_WEBASSEMBLY
4353  if (v8_flags.flush_liftoff_code) {
4355  }
4356 #endif // V8_ENABLE_WEBASSEMBLY
4358 }
void CompleteArrayBufferSweeping(Heap *heap)
Definition: heap.cc:1996
WasmEngine * GetWasmEngine()

References v8::internal::anonymous_namespace{heap.cc}::CompleteArrayBufferSweeping(), v8::internal::wasm::WasmEngine::FlushLiftoffCode(), v8::internal::wasm::GetWasmEngine(), and v8::internal::v8_flags.

Referenced by CollectAllAvailableGarbage(), and CollectGarbageOnMemoryPressure().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EmbedderAllocationCounter()

size_t v8::internal::Heap::EmbedderAllocationCounter ( ) const

Definition at line 7275 of file heap.cc.

7275  {
7277 }
size_t allocated_size() const
Definition: cpp-heap.h:173

References v8::internal::CppHeap::allocated_size(), cpp_heap_, and v8::internal::CppHeap::From().

Referenced by v8::internal::GCTracer::StartInSafepoint().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EmbedderSizeOfObjects()

size_t v8::internal::Heap::EmbedderSizeOfObjects ( ) const

Definition at line 5419 of file heap.cc.

5419  {
5420  return cpp_heap_ ? CppHeap::From(cpp_heap_)->used_size() : 0;
5421 }
size_t used_size() const
Definition: cpp-heap.h:170

References cpp_heap_, v8::internal::CppHeap::From(), and v8::internal::CppHeap::used_size().

Referenced by GlobalSizeOfObjects(), v8::internal::IncrementalMarking::IsBelowActivationThresholds(), MarkCompact(), and RecomputeLimitsAfterLoadingIfNeeded().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnableInlineAllocation()

void v8::internal::Heap::EnableInlineAllocation ( )

Definition at line 5736 of file heap.cc.

5736 { inline_allocation_enabled_ = true; }

References inline_allocation_enabled_.

Referenced by RemoveHeapObjectAllocationTracker().

+ Here is the caller graph for this function:

◆ EnqueueDirtyJSFinalizationRegistry()

void v8::internal::Heap::EnqueueDirtyJSFinalizationRegistry ( Tagged< JSFinalizationRegistry finalization_registry,
std::function< void(Tagged< HeapObject > object, ObjectSlot slot, Tagged< Object > target)>  gc_notify_updated_slot 
)

Definition at line 7050 of file heap.cc.

7054  {
7055  // Add a FinalizationRegistry to the tail of the dirty list.
7057  IsJSFinalizationRegistry(dirty_js_finalization_registries_list()));
7058  DCHECK(IsUndefined(finalization_registry->next_dirty(), isolate()));
7059  DCHECK(!finalization_registry->scheduled_for_cleanup());
7060  finalization_registry->set_scheduled_for_cleanup(true);
7061  if (IsUndefined(dirty_js_finalization_registries_list_tail(), isolate())) {
7063  set_dirty_js_finalization_registries_list(finalization_registry);
7064  // dirty_js_finalization_registries_list_ is rescanned by
7065  // ProcessWeakListRoots.
7066  } else {
7067  Tagged<JSFinalizationRegistry> tail = Cast<JSFinalizationRegistry>(
7069  tail->set_next_dirty(finalization_registry);
7070  gc_notify_updated_slot(
7071  tail, tail->RawField(JSFinalizationRegistry::kNextDirtyOffset),
7072  finalization_registry);
7073  }
7074  set_dirty_js_finalization_registries_list_tail(finalization_registry);
7075  // dirty_js_finalization_registries_list_tail_ is rescanned by
7076  // ProcessWeakListRoots.
7077 }

References v8::internal::DCHECK(), dirty_js_finalization_registries_list(), dirty_js_finalization_registries_list_tail(), HasDirtyJSFinalizationRegistries(), isolate(), set_dirty_js_finalization_registries_list(), and set_dirty_js_finalization_registries_list_tail().

Referenced by v8::internal::FinalizationRegistryCleanupTask::RunInternal().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnsureMinimumRemainingAllocationLimit()

void v8::internal::Heap::EnsureMinimumRemainingAllocationLimit ( size_t  at_least_remaining)
private

Definition at line 3253 of file heap.cc.

3253  {
3254  base::MutexGuard guard(old_space()->mutex());
3255  size_t new_old_generation_allocation_limit =
3256  std::max(OldGenerationConsumedBytes() + at_least_remaining,
3258  new_old_generation_allocation_limit =
3259  std::max(new_old_generation_allocation_limit, min_old_generation_size());
3260  new_old_generation_allocation_limit =
3261  std::min(new_old_generation_allocation_limit, max_old_generation_size());
3262 
3263  size_t new_global_allocation_limit = std::max(
3264  GlobalConsumedBytes() + GlobalMemorySizeFromV8Size(at_least_remaining),
3266  new_global_allocation_limit =
3267  std::max(new_global_allocation_limit, min_global_memory_size_);
3268  new_global_allocation_limit =
3269  std::min(new_global_allocation_limit, max_global_memory_size_);
3270  SetOldGenerationAndGlobalAllocationLimit(new_old_generation_allocation_limit,
3271  new_global_allocation_limit);
3272  // Reset using_initial_limit() to prevent the sweeper from overwriting this
3273  // limit right after this operation.
3275 }
size_t min_old_generation_size() const
Definition: heap.h:2003
void SetOldGenerationAndGlobalAllocationLimit(size_t new_old_generation_allocation_limit, size_t new_global_allocation_limit, const char *reason=__builtin_FUNCTION())
Definition: heap.cc:1558
void set_using_initial_limit(bool value)
Definition: heap.h:1995
LockGuard< Mutex > MutexGuard
Definition: mutex.h:219

References global_allocation_limit(), GlobalConsumedBytes(), v8::internal::anonymous_namespace{heap.cc}::GlobalMemorySizeFromV8Size(), max_global_memory_size_, max_old_generation_size(), min_global_memory_size_, min_old_generation_size(), old_generation_allocation_limit(), old_space(), OldGenerationConsumedBytes(), set_using_initial_limit(), and SetOldGenerationAndGlobalAllocationLimit().

Referenced by NotifyContextDisposed().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnsureQuarantinedPagesSweepingCompleted()

void v8::internal::Heap::EnsureQuarantinedPagesSweepingCompleted ( )

Definition at line 7548 of file heap.cc.

7548  {
7549  if (v8_flags.minor_ms) {
7550  return;
7551  }
7552  scavenger_collector_->CompleteSweepingQuarantinedPagesIfNeeded();
7553 }
std::unique_ptr< ScavengerCollector > scavenger_collector_
Definition: heap.h:2333

References scavenger_collector_, and v8::internal::v8_flags.

Referenced by EnsureSweepingCompleted(), EnsureYoungSweepingCompleted(), and v8::internal::MarkCompactCollector::MarkObjectsFromClientHeap().

+ Here is the caller graph for this function:

◆ EnsureSweepingCompleted()

void v8::internal::Heap::EnsureSweepingCompleted ( SweepingForcedFinalizationMode  mode)

Definition at line 7474 of file heap.cc.

7474  {
7476 
7478 
7479  if (sweeper()->sweeping_in_progress()) {
7480  bool was_minor_sweeping_in_progress = minor_sweeping_in_progress();
7481  bool was_major_sweeping_in_progress = major_sweeping_in_progress();
7483 
7484  if (was_major_sweeping_in_progress) {
7485  TRACE_GC_EPOCH_WITH_FLOW(tracer(), GCTracer::Scope::MC_COMPLETE_SWEEPING,
7487  sweeper_->GetTraceIdForFlowEvent(
7488  GCTracer::Scope::MC_COMPLETE_SWEEPING),
7492  if (shared_space()) {
7495  }
7496 
7498  } else if (v8_flags.sticky_mark_bits) {
7499  // With sticky markbits there is no separate young gen. Minor sweeping
7500  // will thus sweep pages in old space, so old space freelist should be
7501  // refilled.
7502  DCHECK(was_minor_sweeping_in_progress);
7504  }
7505 
7506  if (!v8_flags.sticky_mark_bits && v8_flags.minor_ms && use_new_space() &&
7507  was_minor_sweeping_in_progress) {
7509  tracer(), GCTracer::Scope::MINOR_MS_COMPLETE_SWEEPING,
7511  sweeper_->GetTraceIdForFlowEvent(
7512  GCTracer::Scope::MINOR_MS_COMPLETE_SWEEPING),
7515  }
7516 
7518 
7519 #ifdef VERIFY_HEAP
7520  if (v8_flags.verify_heap) {
7521  EvacuationVerifier verifier(this);
7522  verifier.Run();
7523  }
7524 #endif
7525  }
7526 
7528  // Ensure that sweeping is also completed for the C++ managed heap, if one
7529  // exists.
7531  DCHECK(!CppHeap::From(cpp_heap())->sweeper().IsSweepingInProgress());
7532  }
7533 
7536  !tracer()->IsSweepingInProgress());
7537 
7538  if (v8_flags.external_memory_accounted_in_global_limit) {
7539  if (!using_initial_limit()) {
7540  auto new_limits = ComputeNewAllocationLimits(this);
7542  new_limits.old_generation_allocation_limit,
7543  new_limits.global_allocation_limit);
7544  }
7545  }
7546 }
void FinishSweepingIfRunning()
Definition: cpp-heap.cc:1239
void NotifyFullSweepingCompletedAndStopCycleIfFinished()
Definition: gc-tracer.cc:539
bool use_new_space() const
Definition: heap.h:1699
std::unique_ptr< Sweeper > sweeper_
Definition: heap.h:2330
SharedTrustedSpace * shared_trusted_space() const
Definition: heap.h:783
bool major_sweeping_in_progress() const
Definition: heap.h:1598
bool using_initial_limit() const
Definition: heap.h:1991
PagedNewSpace * paged_new_space() const
Definition: heap-inl.h:426
static LimitsCompuatationResult ComputeNewAllocationLimits(Heap *heap)
Definition: heap.cc:2561
void EnsureQuarantinedPagesSweepingCompleted()
Definition: heap.cc:7548
bool minor_sweeping_in_progress() const
Definition: heap.h:1595
PagedSpaceForNewSpace * paged_space()
Definition: new-spaces.h:718
void EnsureMajorCompleted()
Definition: sweeper.cc:891
#define TRACE_GC_EPOCH_WITH_FLOW(tracer, scope_id, thread_kind, bind_id, flow_flags)
Definition: gc-tracer.h:84
#define TRACE_EVENT_FLAG_FLOW_OUT
#define TRACE_EVENT_FLAG_FLOW_IN

References code_space(), v8::internal::anonymous_namespace{heap.cc}::CompleteArrayBufferSweeping(), ComputeNewAllocationLimits(), cpp_heap(), v8::internal::DCHECK(), DCHECK_IMPLIES, v8::internal::Sweeper::EnsureMajorCompleted(), EnsureQuarantinedPagesSweepingCompleted(), v8::internal::CppHeap::FinishSweepingIfRunning(), v8::internal::CppHeap::From(), v8::internal::kMain, kUnifiedHeap, major_sweeping_in_progress(), minor_sweeping_in_progress(), mode(), v8::internal::GCTracer::NotifyFullSweepingCompletedAndStopCycleIfFinished(), old_space(), paged_new_space(), v8::internal::PagedNewSpace::paged_space(), v8::internal::PagedSpaceBase::RefillFreeList(), SetOldGenerationAndGlobalAllocationLimit(), shared_space(), shared_trusted_space(), sweeper(), sweeper_, sweeping_in_progress(), TRACE_EVENT_FLAG_FLOW_IN, TRACE_EVENT_FLAG_FLOW_OUT, TRACE_GC_EPOCH_WITH_FLOW, tracer(), trusted_space(), use_new_space(), using_initial_limit(), and v8::internal::v8_flags.

Referenced by CompleteSweepingFull(), FinishSweepingIfOutOfWork(), MakeHeapIterable(), and v8::internal::ReadOnlyHeap::OnCreateHeapObjectsComplete().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnsureSweepingCompletedForObject()

void v8::internal::Heap::EnsureSweepingCompletedForObject ( Tagged< HeapObject object)

Definition at line 2543 of file heap.cc.

2543  {
2544  if (!sweeping_in_progress()) return;
2545 
2546  MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
2547  if (chunk->InReadOnlySpace()) return;
2548 
2549  MutablePageMetadata* mutable_page =
2550  MutablePageMetadata::cast(chunk->Metadata());
2551  if (mutable_page->SweepingDone()) return;
2552 
2553  // SweepingDone() is always true for large pages.
2554  DCHECK(!chunk->IsLargePage());
2555 
2556  PageMetadata* page = PageMetadata::cast(mutable_page);
2557  sweeper()->EnsurePageIsSwept(page);
2558 }
static MutablePageMetadata * cast(MemoryChunkMetadata *metadata)
void EnsurePageIsSwept(PageMetadata *page)
Definition: sweeper.cc:1305

References v8::internal::MutablePageMetadata::cast(), v8::internal::PageMetadata::cast(), v8::internal::DCHECK(), v8::internal::Sweeper::EnsurePageIsSwept(), v8::internal::MemoryChunk::FromHeapObject(), v8::internal::MemoryChunk::InReadOnlySpace(), v8::internal::MemoryChunk::IsLargePage(), v8::internal::MemoryChunk::Metadata(), sweeper(), sweeping_in_progress(), and v8::internal::MutablePageMetadata::SweepingDone().

Referenced by v8::internal::TranslatedState::InitializeJSObjectAt(), v8::internal::TranslatedState::InitializeObjectWithTaggedFieldsAt(), and v8::internal::FoldedMutableHeapNumberAllocator::~FoldedMutableHeapNumberAllocator().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnsureYoungSweepingCompleted()

void v8::internal::Heap::EnsureYoungSweepingCompleted ( )

Definition at line 7555 of file heap.cc.

7555  {
7557 
7559 
7560  if (!sweeper()->minor_sweeping_in_progress()) return;
7561  DCHECK(!v8_flags.sticky_mark_bits);
7562 
7564  tracer(), GCTracer::Scope::MINOR_MS_COMPLETE_SWEEPING, ThreadKind::kMain,
7565  sweeper_->GetTraceIdForFlowEvent(
7566  GCTracer::Scope::MINOR_MS_COMPLETE_SWEEPING),
7568 
7571 
7573 }
void NotifyYoungSweepingCompletedAndStopCycleIfFinished()
Definition: gc-tracer.cc:593
void EnsureMinorCompleted()
Definition: sweeper.cc:962

References v8::internal::anonymous_namespace{heap.cc}::CompleteArrayBufferSweeping(), v8::internal::DCHECK(), v8::internal::Sweeper::EnsureMinorCompleted(), EnsureQuarantinedPagesSweepingCompleted(), v8::internal::kMain, minor_sweeping_in_progress(), v8::internal::GCTracer::NotifyYoungSweepingCompletedAndStopCycleIfFinished(), paged_new_space(), v8::internal::PagedNewSpace::paged_space(), v8::internal::PagedSpaceBase::RefillFreeList(), sweeper(), sweeper_, TRACE_EVENT_FLAG_FLOW_IN, TRACE_GC_EPOCH_WITH_FLOW, tracer(), and v8::internal::v8_flags.

Referenced by CompleteSweepingYoung().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ephemeron_remembered_set()

EphemeronRememberedSet* v8::internal::Heap::ephemeron_remembered_set ( )
inline

Definition at line 404 of file heap.h.

404  {
405  return ephemeron_remembered_set_.get();
406  }
std::unique_ptr< EphemeronRememberedSet > ephemeron_remembered_set_
Definition: heap.h:2347

Referenced by v8::internal::MinorMarkSweepCollector::ClearNonLiveReferences(), and v8::internal::WriteBarrier::CombinedGenerationalAndSharedEphemeronBarrierSlow().

+ Here is the caller graph for this function:

◆ EvaluateOldSpaceLocalPretenuring()

void v8::internal::Heap::EvaluateOldSpaceLocalPretenuring ( uint64_t  size_of_objects_before_gc)
private

Definition at line 3094 of file heap.cc.

3095  {
3096  uint64_t size_of_objects_after_gc = SizeOfObjects();
3097  double old_generation_survival_rate =
3098  (static_cast<double>(size_of_objects_after_gc) * 100) /
3099  static_cast<double>(size_of_objects_before_gc);
3100 
3101  if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) {
3102  // Too many objects died in the old generation, pretenuring of wrong
3103  // allocation sites may be the cause for that. We have to deopt all
3104  // dependent code registered in the allocation sites to re-evaluate
3105  // our pretenuring decisions.
3107  if (v8_flags.trace_pretenuring) {
3108  PrintF(
3109  "Deopt all allocation sites dependent code due to low survival "
3110  "rate in the old generation %f\n",
3111  old_generation_survival_rate);
3112  }
3113  }
3114 }
static const int kOldSurvivalRateLowThreshold
Definition: heap.h:1766
void ResetAllAllocationSitesDependentCode(AllocationType allocation)
Definition: heap.cc:3076
void PrintF(const char *format,...)
Definition: utils.cc:39

References v8::internal::kOld, kOldSurvivalRateLowThreshold, v8::internal::PrintF(), ResetAllAllocationSitesDependentCode(), SizeOfObjects(), and v8::internal::v8_flags.

Referenced by MarkCompact().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ExpandNewSpaceSize()

void v8::internal::Heap::ExpandNewSpaceSize ( )
private

Definition at line 4009 of file heap.cc.

4009  {
4010  // Grow the size of new space if there is room to grow, and enough data
4011  // has survived scavenge since the last expansion.
4012  const size_t suggested_capacity =
4013  static_cast<size_t>(v8_flags.semi_space_growth_factor) *
4015  const size_t chosen_capacity =
4016  std::min(suggested_capacity, new_space_->MaximumCapacity());
4017  DCHECK(IsAligned(chosen_capacity, PageMetadata::kPageSize));
4018 
4019  if (chosen_capacity > new_space_->TotalCapacity()) {
4020  new_space_->Grow(chosen_capacity);
4021  new_lo_space()->SetCapacity(new_space()->TotalCapacity());
4022  }
4023 }
void SetCapacity(size_t capacity)
virtual void Grow(size_t new_capacity)=0
virtual size_t MaximumCapacity() const =0
virtual size_t TotalCapacity() const =0

References v8::internal::DCHECK(), v8::internal::NewSpace::Grow(), IsAligned(), v8::internal::MutablePageMetadata::kPageSize, v8::internal::NewSpace::MaximumCapacity(), new_lo_space(), new_space(), new_space_, v8::internal::NewLargeObjectSpace::SetCapacity(), v8::internal::NewSpace::TotalCapacity(), and v8::internal::v8_flags.

Referenced by ExpandNewSpaceSizeForTesting(), and ResizeNewSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ExpandNewSpaceSizeForTesting()

void v8::internal::Heap::ExpandNewSpaceSizeForTesting ( )

Definition at line 4007 of file heap.cc.

4007 { ExpandNewSpaceSize(); }
void ExpandNewSpaceSize()
Definition: heap.cc:4009

References ExpandNewSpaceSize().

+ Here is the call graph for this function:

◆ external_memory()

uint64_t v8::internal::Heap::external_memory ( ) const
inline

Definition at line 67 of file heap-inl.h.

67 { return external_memory_.total(); }

References external_memory_, and v8::internal::Heap::ExternalMemoryAccounting::total().

Referenced by CollectGarbageOnMemoryPressure(), v8::ExternalMemoryAccounter::GetTotalAmountOfExternalAllocatedMemoryForTesting(), GlobalSizeOfObjects(), HandleExternalMemoryInterrupt(), PrintShortHeapStatistics(), and v8::internal::GCTracer::RecordGCSizeCounters().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ external_memory_hard_limit()

uint64_t v8::internal::Heap::external_memory_hard_limit ( )
inline

Definition at line 655 of file heap.h.

655  {
658  }

Referenced by HandleExternalMemoryInterrupt().

+ Here is the caller graph for this function:

◆ external_memory_limit_for_interrupt()

uint64_t v8::internal::Heap::external_memory_limit_for_interrupt ( )

Definition at line 1506 of file heap.cc.

1506  {
1508 }

References external_memory_, and v8::internal::Heap::ExternalMemoryAccounting::limit_for_interrupt().

Referenced by v8::Isolate::AdjustAmountOfExternalAllocatedMemoryImpl().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ external_memory_soft_limit()

uint64_t v8::internal::Heap::external_memory_soft_limit ( )

Definition at line 1510 of file heap.cc.

1510  {
1511  return external_memory_.soft_limit();
1512 }

References external_memory_, and v8::internal::Heap::ExternalMemoryAccounting::soft_limit().

+ Here is the call graph for this function:

◆ FatalProcessOutOfMemory()

void v8::internal::Heap::FatalProcessOutOfMemory ( const char *  location)

Definition at line 6584 of file heap.cc.

6584  {
6586 }
static V8_EXPORT_PRIVATE void FatalProcessOutOfMemory(Isolate *isolate, const char *location, const OOMDetails &details=kNoOOMDetails)
static V8_EXPORT_PRIVATE const OOMDetails kHeapOOM
Definition: v8.h:37

References v8::internal::V8::FatalProcessOutOfMemory(), isolate(), and v8::internal::V8::kHeapOOM.

Referenced by v8::internal::EvacuateNewSpaceVisitor::AllocateInOldSpace(), v8::internal::ReadOnlySpace::AllocateNextPageAt(), CheckHeapLimitReached(), CheckIneffectiveMarkCompact(), CollectGarbage(), CollectGarbageShared(), v8::internal::Scavenger::EvacuateObjectDefault(), v8::internal::JSTypedArray::GetBuffer(), KeepDuringJob(), and v8::internal::EvacuateNewSpaceVisitor::Visit().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FinalizeExternalString()

void v8::internal::Heap::FinalizeExternalString ( Tagged< String string)
inline

Definition at line 219 of file heap-inl.h.

219  {
220  DCHECK(IsExternalString(string));
221  Tagged<ExternalString> ext_string = Cast<ExternalString>(string);
223  page->DecrementExternalBackingStoreBytes(
225  ext_string->ExternalPayloadSize());
226  ext_string->DisposeResource(isolate());
227 }
static PageMetadata * FromHeapObject(Tagged< HeapObject > o)
constexpr bool IsExternalString(InstanceType instance_type)

References v8::internal::DCHECK(), v8::internal::MutablePageMetadata::DecrementExternalBackingStoreBytes(), v8::internal::PageMetadata::FromHeapObject(), v8::internal::InstanceTypeChecker::IsExternalString(), isolate(), and v8::internal::kExternalString.

Referenced by v8::internal::anonymous_namespace{string.cc}::MigrateExternalString(), and v8::internal::anonymous_namespace{string.cc}::MigrateExternalStringResource().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FinalizeIncrementalMarkingAtomically()

void v8::internal::Heap::FinalizeIncrementalMarkingAtomically ( GarbageCollectionReason  gc_reason)

Definition at line 4058 of file heap.cc.

4059  {
4060  DCHECK(!incremental_marking()->IsStopped());
4062 }

References CollectAllGarbage(), current_gc_callback_flags_, current_gc_flags_, v8::internal::DCHECK(), and incremental_marking().

Referenced by v8::internal::IncrementalMarking::AdvanceAndFinalizeIfComplete(), v8::internal::IncrementalMarking::AdvanceAndFinalizeIfNecessary(), FinalizeIncrementalMarkingAtomicallyIfRunning(), and v8::internal::CppHeap::StartDetachingIsolate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FinalizeIncrementalMarkingAtomicallyIfRunning()

void v8::internal::Heap::FinalizeIncrementalMarkingAtomicallyIfRunning ( GarbageCollectionReason  gc_reason)

Definition at line 4064 of file heap.cc.

4065  {
4066  if (!incremental_marking()->IsStopped()) {
4068  }
4069 }
V8_EXPORT_PRIVATE void FinalizeIncrementalMarkingAtomically(GarbageCollectionReason gc_reason)
Definition: heap.cc:4058

References FinalizeIncrementalMarkingAtomically(), and incremental_marking().

Referenced by PreciseCollectAllGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FinalizePartialMap()

void v8::internal::Heap::FinalizePartialMap ( Tagged< Map map)
private

Definition at line 318 of file setup-heap-internal.cc.

318  {
319  ReadOnlyRoots roots(this);
320  map->set_dependent_code(DependentCode::empty_dependent_code(roots));
321  map->set_raw_transitions(Smi::zero());
322  map->SetInstanceDescriptors(isolate(), roots.empty_descriptor_array(), 0,
324  map->init_prototype_and_constructor_or_back_pointer(roots);
325 }
static V8_EXPORT_PRIVATE Tagged< DependentCode > empty_dependent_code(const ReadOnlyRoots &roots)

References v8::internal::DependentCode::empty_dependent_code(), isolate(), v8::internal::SKIP_WRITE_BARRIER, and v8::internal::Smi::zero().

Referenced by CreateEarlyReadOnlyMapsAndObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FindAllNativeContexts()

std::vector< Handle< NativeContext > > v8::internal::Heap::FindAllNativeContexts ( )
private

Definition at line 7203 of file heap.cc.

7203  {
7204  std::vector<Handle<NativeContext>> result;
7206  while (!IsUndefined(context, isolate())) {
7207  Tagged<NativeContext> native_context = Cast<NativeContext>(context);
7208  result.push_back(handle(native_context, isolate()));
7209  context = native_context->next_context_link();
7210  }
7211  return result;
7212 }
Tagged< Object > native_contexts_list() const
Definition: heap.h:504
Tagged< NativeContext > native_context
Definition: map-inl.h:896

References v8::internal::handle(), isolate(), v8::internal::native_context, native_contexts_list(), and v8::base::internal::result.

Referenced by MeasureMemory(), and v8::internal::MarkCompactCollector::StartMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FindAllRetainedMaps()

std::vector< Tagged< WeakArrayList > > v8::internal::Heap::FindAllRetainedMaps ( )
private

Definition at line 7214 of file heap.cc.

7214  {
7215  std::vector<Tagged<WeakArrayList>> result;
7217  while (!IsUndefined(context, isolate())) {
7218  Tagged<NativeContext> native_context = Cast<NativeContext>(context);
7219  result.push_back(Cast<WeakArrayList>(native_context->retained_maps()));
7220  context = native_context->next_context_link();
7221  }
7222  return result;
7223 }

References isolate(), v8::internal::native_context, native_contexts_list(), and v8::base::internal::result.

+ Here is the call graph for this function:

◆ FindCodeForInnerPointer()

Tagged< Code > v8::internal::Heap::FindCodeForInnerPointer ( Address  inner_pointer)

Definition at line 7352 of file heap.cc.

7352  {
7353  return GcSafeFindCodeForInnerPointer(inner_pointer)->UnsafeCastToCode();
7354 }
Tagged< GcSafeCode > GcSafeFindCodeForInnerPointer(Address inner_pointer)
Definition: heap.cc:7356

References GcSafeFindCodeForInnerPointer().

Referenced by v8::internal::Deoptimizer::Deoptimizer(), and v8::internal::anonymous_namespace{frames.cc}::IsInterpreterFramePc().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FinishSweepingIfOutOfWork()

void v8::internal::Heap::FinishSweepingIfOutOfWork ( )

Definition at line 7456 of file heap.cc.

7456  {
7458  sweeper()->UsingMajorSweeperTasks() &&
7459  !sweeper()->AreMajorSweeperTasksRunning()) {
7460  // At this point we know that all concurrent sweeping tasks have run
7461  // out of work and quit: all pages are swept. The main thread still needs
7462  // to complete sweeping though.
7464  !sweeper()->HasUnsweptPagesForMajorSweeping());
7466  }
7467  if (cpp_heap()) {
7468  // Ensure that sweeping is also completed for the C++ managed heap, if one
7469  // exists and it's out of work.
7471  }
7472 }
void FinishSweepingIfOutOfWork()
Definition: cpp-heap.cc:1255
bool delay_sweeper_tasks_for_testing_
Definition: heap.h:2450

References cpp_heap(), DCHECK_IMPLIES, delay_sweeper_tasks_for_testing_, EnsureSweepingCompleted(), v8::internal::CppHeap::FinishSweepingIfOutOfWork(), v8::internal::CppHeap::From(), kV8Only, major_sweeping_in_progress(), and sweeper().

Referenced by CompleteSweepingYoung().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ force_oom()

bool v8::internal::Heap::force_oom ( ) const
inline

Definition at line 575 of file heap.h.

575 { return force_oom_; }

Referenced by v8::internal::PagedSpaceAllocatorPolicy::RefillLab().

+ Here is the caller graph for this function:

◆ ForeachAllocationSite()

void v8::internal::Heap::ForeachAllocationSite ( Tagged< Object list,
const std::function< void(Tagged< AllocationSite >)> &  visitor 
)

Definition at line 3057 of file heap.cc.

3059  {
3061  Tagged<Object> current = list;
3062  while (IsAllocationSite(current)) {
3063  Tagged<AllocationSiteWithWeakNext> site =
3064  Cast<AllocationSiteWithWeakNext>(current);
3065  visitor(site);
3066  Tagged<Object> current_nested = site->nested_site();
3067  while (IsAllocationSite(current_nested)) {
3068  Tagged<AllocationSite> nested_site = Cast<AllocationSite>(current_nested);
3069  visitor(nested_site);
3070  current_nested = nested_site->nested_site();
3071  }
3072  current = site->weak_next();
3073  }
3074 }

References v8::internal::anonymous_namespace{json-stringifier.cc}::no_gc.

Referenced by DeoptMarkedAllocationSites(), v8::internal::PretenuringHandler::ProcessPretenuringFeedback(), and ResetAllAllocationSitesDependentCode().

+ Here is the caller graph for this function:

◆ FreeLinearAllocationAreas()

void v8::internal::Heap::FreeLinearAllocationAreas ( )

Definition at line 3681 of file heap.cc.

3681  {
3683 
3685  [](LocalHeap* local_heap) { local_heap->FreeLinearAllocationAreas(); });
3686 
3687  if (isolate()->is_shared_space_isolate()) {
3689  [](Isolate* client) { client->heap()->FreeLinearAllocationAreas(); });
3690  }
3691 }
void IterateClientIsolates(Callback callback)
Definition: safepoint.h:190
V8_EXPORT_PRIVATE void FreeLinearAllocationAreas()
Definition: heap.cc:3681
IsolateSafepoint * safepoint()
Definition: heap.h:622
friend class LocalHeap
Definition: heap.h:2502
void IterateLocalHeaps(Callback callback)
Definition: safepoint.h:37
GlobalSafepoint * global_safepoint() const
Definition: isolate.h:2345

References v8::internal::LocalHeap::FreeLinearAllocationAreas(), FreeMainThreadLinearAllocationAreas(), v8::internal::Isolate::global_safepoint(), v8::internal::Isolate::heap(), isolate(), v8::internal::GlobalSafepoint::IterateClientIsolates(), v8::internal::IsolateSafepoint::IterateLocalHeaps(), and safepoint().

Referenced by PerformGarbageCollection(), and v8::internal::IncrementalMarking::StartMarkingMajor().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FreeMainThreadLinearAllocationAreas()

void v8::internal::Heap::FreeMainThreadLinearAllocationAreas ( )

Definition at line 3693 of file heap.cc.

3693  {
3695 }

References allocator(), and v8::internal::HeapAllocator::FreeLinearAllocationAreas().

Referenced by AddAllocationObserversToAllSpaces(), DisableInlineAllocation(), FreeLinearAllocationAreas(), NotifyDeserializationComplete(), and StartTearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FreeSharedLinearAllocationAreasAndResetFreeLists()

void v8::internal::Heap::FreeSharedLinearAllocationAreasAndResetFreeLists ( )
private

Definition at line 3716 of file heap.cc.

3716  {
3717  DCHECK(v8_flags.black_allocated_pages);
3720 
3721  safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
3722  local_heap->FreeSharedLinearAllocationAreasAndResetFreeLists();
3723  });
3724 }
void FreeSharedLinearAllocationAreasAndResetFreeLists()
void FreeSharedLinearAllocationAreasAndResetFreeLists()
Definition: local-heap.cc:449

References allocator(), v8::internal::DCHECK(), v8::internal::HeapAllocator::FreeSharedLinearAllocationAreasAndResetFreeLists(), v8::internal::LocalHeap::FreeSharedLinearAllocationAreasAndResetFreeLists(), v8::internal::IsolateSafepoint::IterateLocalHeaps(), main_thread_local_heap(), safepoint(), and v8::internal::v8_flags.

+ Here is the call graph for this function:

◆ FRIEND_TEST() [1/2]

v8::internal::Heap::FRIEND_TEST ( SpacesTest  ,
AllocationObserver   
)
private

◆ FRIEND_TEST() [2/2]

v8::internal::Heap::FRIEND_TEST ( SpacesTest  ,
InlineAllocationObserverCadence   
)
private

◆ FromWritableHeapObject()

Heap * v8::internal::Heap::FromWritableHeapObject ( Tagged< HeapObject obj)
inlinestatic

Definition at line 281 of file heap-inl.h.

281  {
282  MemoryChunkMetadata* chunk = MemoryChunkMetadata::FromHeapObject(obj);
283  // RO_SPACE can be shared between heaps, so we can't use RO_SPACE objects to
284  // find a heap. The exception is when the ReadOnlySpace is writeable, during
285  // bootstrapping, so explicitly allow this case.
286  SLOW_DCHECK(chunk->IsWritable());
287  Heap* heap = chunk->heap();
288  SLOW_DCHECK(heap != nullptr);
289  return heap;
290 }
#define SLOW_DCHECK(condition)
Definition: checks.h:21
static MemoryChunkMetadata * FromHeapObject(Tagged< HeapObject > o)

References v8::internal::MemoryChunkMetadata::FromHeapObject(), v8::internal::MemoryChunkMetadata::heap(), heap(), v8::internal::MemoryChunkMetadata::IsWritable(), and SLOW_DCHECK.

Referenced by v8::internal::WriteBarrier::PageFlagsAreConsistent().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GarbageCollectionEpilogue()

void v8::internal::Heap::GarbageCollectionEpilogue ( GarbageCollector  collector)
private

Definition at line 1163 of file heap.cc.

1163  {
1164  TRACE_GC(tracer(), GCTracer::Scope::HEAP_EPILOGUE);
1165  AllowGarbageCollection for_the_rest_of_the_epilogue;
1166 
1168 
1169  isolate_->counters()->alive_after_last_gc()->Set(
1170  static_cast<int>(SizeOfObjects()));
1171 
1172  if (CommittedMemory() > 0) {
1173  isolate_->counters()->external_fragmentation_total()->AddSample(
1174  static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));
1175 
1176  isolate_->counters()->heap_sample_total_committed()->AddSample(
1177  static_cast<int>(CommittedMemory() / KB));
1178  isolate_->counters()->heap_sample_total_used()->AddSample(
1179  static_cast<int>(SizeOfObjects() / KB));
1180  isolate_->counters()->heap_sample_code_space_committed()->AddSample(
1181  static_cast<int>(code_space()->CommittedMemory() / KB));
1182 
1183  isolate_->counters()->heap_sample_maximum_committed()->AddSample(
1184  static_cast<int>(MaximumCommittedMemory() / KB));
1185  }
1186 
1187 #ifdef DEBUG
1189  if (v8_flags.code_stats) ReportCodeStatistics("After GC");
1190 #endif // DEBUG
1191 
1193 }
double last_gc_time_
Definition: heap.h:2327
void ReportStatisticsAfterGC()
Definition: heap.cc:812
size_t MaximumCommittedMemory()
Definition: heap.h:1337
void UpdateMaximumCommitted()
Definition: heap.cc:391
Counters * counters()
Definition: isolate.h:1195
PerThreadAssertScopeDebugOnly< true, SAFEPOINTS_ASSERT, HEAP_ALLOCATION_ASSERT > AllowGarbageCollection
Definition: assert-scope.h:271

References code_space(), CommittedMemory(), v8::internal::Isolate::counters(), isolate_, v8::internal::KB, last_gc_time_, MaximumCommittedMemory(), MonotonicallyIncreasingTimeInMs(), ReportStatisticsAfterGC(), SizeOfObjects(), TRACE_GC, tracer(), UpdateMaximumCommitted(), and v8::internal::v8_flags.

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GarbageCollectionEpilogueInSafepoint()

void v8::internal::Heap::GarbageCollectionEpilogueInSafepoint ( GarbageCollector  collector)
private

Definition at line 1074 of file heap.cc.

1074  {
1075  TRACE_GC(tracer(), GCTracer::Scope::HEAP_EPILOGUE_SAFEPOINT);
1076 
1077  {
1078  // Allows handle derefs for all threads/isolates from this thread.
1079  AllowHandleUsageOnAllThreads allow_all_handle_derefs;
1080  safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
1081  local_heap->InvokeGCEpilogueCallbacksInSafepoint(
1082  GCCallbacksInSafepoint::GCType::kLocal);
1083  });
1084 
1085  if (collector == GarbageCollector::MARK_COMPACTOR &&
1086  isolate()->is_shared_space_isolate()) {
1088  client->heap()->safepoint()->IterateLocalHeaps(
1089  [](LocalHeap* local_heap) {
1090  local_heap->InvokeGCEpilogueCallbacksInSafepoint(
1092  });
1093  });
1094  }
1095  }
1096 
1097 #define UPDATE_COUNTERS_FOR_SPACE(space) \
1098  isolate_->counters()->space##_bytes_available()->Set( \
1099  static_cast<int>(space()->Available())); \
1100  isolate_->counters()->space##_bytes_committed()->Set( \
1101  static_cast<int>(space()->CommittedMemory())); \
1102  isolate_->counters()->space##_bytes_used()->Set( \
1103  static_cast<int>(space()->SizeOfObjects()));
1104 #define UPDATE_FRAGMENTATION_FOR_SPACE(space) \
1105  if (space()->CommittedMemory() > 0) { \
1106  isolate_->counters()->external_fragmentation_##space()->AddSample( \
1107  static_cast<int>(100 - (space()->SizeOfObjects() * 100.0) / \
1108  space()->CommittedMemory())); \
1109  }
1110 #define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \
1111  UPDATE_COUNTERS_FOR_SPACE(space) \
1112  UPDATE_FRAGMENTATION_FOR_SPACE(space)
1113 
1114  if (new_space()) {
1116  }
1117 
1120 
1122 #undef UPDATE_COUNTERS_FOR_SPACE
1123 #undef UPDATE_FRAGMENTATION_FOR_SPACE
1124 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE
1125 
1126 #ifdef DEBUG
1127  if (v8_flags.print_global_handles) isolate_->global_handles()->Print();
1128  if (v8_flags.print_handles) PrintHandles();
1129  if (v8_flags.check_handle_count) CheckHandleCount();
1130 #endif
1131 
1132  // Young generation GCs only run with memory reducing flags during
1133  // interleaved GCs.
1135  if (collector == GarbageCollector::MARK_COMPACTOR) {
1137  std::memory_order_relaxed);
1138 
1139  if (v8_flags.stress_marking > 0) {
1141  }
1142  // Discard memory if the GC was requested to reduce memory.
1143  if (ShouldReduceMemory()) {
1144  memory_allocator_->ReleasePooledChunksImmediately();
1145 #if V8_ENABLE_WEBASSEMBLY
1146  isolate_->stack_pool().ReleaseFinishedStacks();
1147 #endif
1148  }
1149  }
1150 
1151  // Remove CollectionRequested flag from main thread state, as the collection
1152  // was just performed.
1153  safepoint()->AssertActive();
1154  LocalHeap::ThreadState old_state =
1156 
1157  CHECK(old_state.IsRunning());
1158 
1159  // Resume all threads waiting for the GC.
1160  collection_barrier_->ResumeThreadsAwaitingCollection();
1161 }
void CheckHandleCount()
Definition: heap.cc:6623
std::unique_ptr< MemoryAllocator > memory_allocator_
Definition: heap.h:2336
int NextStressMarkingLimit()
Definition: heap.cc:6128
int stress_marking_percentage_
Definition: heap.h:2262
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space)
#define UPDATE_COUNTERS_FOR_SPACE(space)
PerThreadAssertScopeDebugOnly< true, HANDLE_USAGE_ON_ALL_THREADS_ASSERT > AllowHandleUsageOnAllThreads
Definition: assert-scope.h:219

References v8::internal::IsolateSafepoint::AssertActive(), CHECK, CheckHandleCount(), v8::internal::LocalHeap::AtomicThreadState::ClearCollectionRequested(), code_space(), collection_barrier_, DCHECK_IMPLIES, v8::internal::Isolate::global_handles(), v8::internal::Isolate::global_safepoint(), v8::internal::Isolate::heap(), v8::internal::LocalHeap::InvokeGCEpilogueCallbacksInSafepoint(), isolate(), isolate_, v8::internal::LocalHeap::ThreadState::IsRunning(), IsYoungGenerationCollector(), v8::internal::GlobalSafepoint::IterateClientIsolates(), v8::internal::IsolateSafepoint::IterateLocalHeaps(), v8::kNone, v8::internal::anonymous_namespace{elements.cc}::kShared, lo_space(), main_thread_local_heap(), v8::internal::MARK_COMPACTOR, memory_allocator_, memory_pressure_level_, new_space(), NextStressMarkingLimit(), old_space(), safepoint(), ShouldReduceMemory(), v8::internal::LocalHeap::state_, stress_marking_percentage_, TRACE_GC, tracer(), UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE, UPDATE_COUNTERS_FOR_SPACE, and v8::internal::v8_flags.

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GarbageCollectionPrologue()

void v8::internal::Heap::GarbageCollectionPrologue ( GarbageCollectionReason  gc_reason,
const v8::GCCallbackFlags  gc_callback_flags 
)
private

Definition at line 950 of file heap.cc.

952  {
953  TRACE_GC(tracer(), GCTracer::Scope::HEAP_PROLOGUE);
954 
955  is_current_gc_forced_ = gc_callback_flags & v8::kGCCallbackFlagForced ||
961 
962 #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
963  heap_allocator_->UpdateAllocationTimeout();
964 #endif // V8_ENABLE_ALLOCATION_TIMEOUT
965 
966  if (minor_gc_job()) {
969  }
970 
971  // Reset GC statistics.
977  nodes_promoted_ = 0;
978 
980 
981 #ifdef DEBUG
982  DCHECK(!AllowGarbageCollection::IsAllowed());
984 
985  if (v8_flags.gc_verbose) Print();
986 #endif // DEBUG
987 }
int nodes_copied_in_new_space_
Definition: heap.h:2320
HeapState gc_state() const
Definition: heap.h:564
size_t previous_new_space_surviving_object_size_
Definition: heap.h:2317
MinorGCJob * minor_gc_job()
Definition: heap.h:2054
int nodes_died_in_new_space_
Definition: heap.h:2319
int nodes_promoted_
Definition: heap.h:2321
size_t new_space_surviving_object_size_
Definition: heap.h:2316
size_t promoted_objects_size_
Definition: heap.h:2313
void Print(Tagged< Object > obj)
Definition: objects.h:772

References v8::internal::MinorGCJob::CancelTaskIfScheduled(), current_gc_flags_, v8::internal::DCHECK(), DCHECK_EQ, force_gc_on_next_allocation_, gc_state(), heap_allocator_, is_current_gc_for_heap_profiler_, is_current_gc_forced_, v8::internal::kForced, v8::kGCCallbackFlagForced, v8::internal::kHeapProfiler, minor_gc_job(), new_space_surviving_object_size_, nodes_copied_in_new_space_, nodes_died_in_new_space_, nodes_promoted_, NOT_IN_GC, previous_new_space_surviving_object_size_, v8::internal::Print(), promoted_objects_size_, TRACE_GC, tracer(), UpdateMaximumCommitted(), use_new_space(), and v8::internal::v8_flags.

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GarbageCollectionPrologueInSafepoint()

void v8::internal::Heap::GarbageCollectionPrologueInSafepoint ( GarbageCollector  collector)
private

Definition at line 989 of file heap.cc.

989  {
990  TRACE_GC(tracer(), GCTracer::Scope::HEAP_PROLOGUE_SAFEPOINT);
991  gc_count_++;
993  if (v8_flags.large_page_pool_timeout == 0 &&
994  collector == GarbageCollector::MARK_COMPACTOR) {
996  }
997 }
size_t new_space_allocation_counter_
Definition: heap.h:2387
V8_EXPORT_PRIVATE size_t NewSpaceAllocationCounter() const
Definition: heap.cc:999
void ReleaseLargeImmediately()
Definition: page-pool.cc:333

References gc_count_, v8::internal::MARK_COMPACTOR, memory_allocator(), new_space_allocation_counter_, NewSpaceAllocationCounter(), v8::internal::MemoryAllocator::pool(), v8::internal::PagePool::ReleaseLargeImmediately(), TRACE_GC, tracer(), and v8::internal::v8_flags.

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ gc_count()

int v8::internal::Heap::gc_count ( ) const
inline

◆ gc_state()

HeapState v8::internal::Heap::gc_state ( ) const
inline

◆ GCFlagsForIncrementalMarking()

GCFlags v8::internal::Heap::GCFlagsForIncrementalMarking ( )
inline

Definition at line 1085 of file heap.h.

1085  {
1087  : GCFlag::kNoFlags;
1088  }

Referenced by CollectGarbage(), HandleExternalMemoryInterrupt(), and StartIncrementalMarkingOnInterrupt().

+ Here is the caller graph for this function:

◆ GcSafeFindCodeForInnerPointer()

Tagged< GcSafeCode > v8::internal::Heap::GcSafeFindCodeForInnerPointer ( Address  inner_pointer)

Definition at line 7356 of file heap.cc.

7356  {
7357  std::optional<Tagged<GcSafeCode>> maybe_code =
7358  GcSafeTryFindCodeForInnerPointer(inner_pointer);
7359  // Callers expect that the code object is found.
7360  CHECK(maybe_code.has_value());
7361  return UncheckedCast<GcSafeCode>(maybe_code.value());
7362 }
std::optional< Tagged< GcSafeCode > > GcSafeTryFindCodeForInnerPointer(Address inner_pointer)
Definition: heap.cc:7337

References CHECK, and GcSafeTryFindCodeForInnerPointer().

Referenced by FindCodeForInnerPointer().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GcSafeGetCodeFromInstructionStream()

Tagged< GcSafeCode > v8::internal::Heap::GcSafeGetCodeFromInstructionStream ( Tagged< HeapObject instruction_stream,
Address  inner_pointer 
)
private

Definition at line 7298 of file heap.cc.

7299  {
7300  Tagged<InstructionStream> istream =
7301  UncheckedCast<InstructionStream>(instruction_stream);
7302  DCHECK(!istream.is_null());
7303  DCHECK(GcSafeInstructionStreamContains(istream, inner_pointer));
7304  return UncheckedCast<GcSafeCode>(istream->raw_code(kAcquireLoad));
7305 }
bool GcSafeInstructionStreamContains(Tagged< InstructionStream > instruction_stream, Address addr)
Definition: heap.cc:7307
static constexpr AcquireLoadTag kAcquireLoad
Definition: globals.h:2942

References v8::internal::DCHECK(), GcSafeInstructionStreamContains(), v8::internal::Tagged< HeapObject >::is_null(), and v8::kAcquireLoad.

Referenced by GcSafeTryFindCodeForInnerPointer().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GcSafeInstructionStreamContains()

bool v8::internal::Heap::GcSafeInstructionStreamContains ( Tagged< InstructionStream instruction_stream,
Address  addr 
)

Definition at line 7307 of file heap.cc.

7308  {
7309  Tagged<Map> map = GcSafeMapOfHeapObject(instruction_stream);
7310  DCHECK_EQ(map, ReadOnlyRoots(this).instruction_stream_map());
7311 
7312  Builtin builtin_lookup_result =
7314  if (Builtins::IsBuiltinId(builtin_lookup_result)) {
7315  // Builtins don't have InstructionStream objects.
7317  instruction_stream->code(kAcquireLoad)->builtin_id()));
7318  return false;
7319  }
7320 
7321  Address start = instruction_stream.address();
7322  Address end = start + instruction_stream->SizeFromMap(map);
7323  return start <= addr && addr < end;
7324 }
static constexpr bool IsBuiltinId(Builtin builtin)
Definition: builtins.h:198
Tagged< Map > GcSafeMapOfHeapObject(Tagged< HeapObject > object)
Definition: heap.cc:7289
static Builtin TryLookupCode(Isolate *isolate, Address address)

References v8::internal::Tagged< HeapObject >::address(), v8::internal::DCHECK(), DCHECK_EQ, v8::internal::compiler::end(), GcSafeMapOfHeapObject(), v8::internal::Builtins::IsBuiltinId(), isolate(), v8::kAcquireLoad, ReadOnlyRoots, and v8::internal::OffHeapInstructionStream::TryLookupCode().

Referenced by GcSafeGetCodeFromInstructionStream().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GcSafeMapOfHeapObject()

Tagged< Map > v8::internal::Heap::GcSafeMapOfHeapObject ( Tagged< HeapObject object)
private

Definition at line 7289 of file heap.cc.

7289  {
7290  PtrComprCageBase cage_base(isolate());
7291  MapWord map_word = object->map_word(cage_base, kRelaxedLoad);
7292  if (map_word.IsForwardingAddress()) {
7293  return map_word.ToForwardingAddress(object)->map(cage_base);
7294  }
7295  return map_word.ToMap();
7296 }
static constexpr RelaxedLoadTag kRelaxedLoad
Definition: globals.h:2943

References v8::internal::MapWord::IsForwardingAddress(), isolate(), v8::kRelaxedLoad, v8::internal::MapWord::ToForwardingAddress(), and v8::internal::MapWord::ToMap().

Referenced by GcSafeInstructionStreamContains().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GcSafeTryFindCodeForInnerPointer()

std::optional< Tagged< GcSafeCode > > v8::internal::Heap::GcSafeTryFindCodeForInnerPointer ( Address  inner_pointer)

Definition at line 7337 of file heap.cc.

7338  {
7339  Builtin maybe_builtin =
7341  if (Builtins::IsBuiltinId(maybe_builtin)) {
7342  return Cast<GcSafeCode>(isolate()->builtins()->code(maybe_builtin));
7343  }
7344 
7345  std::optional<Tagged<InstructionStream>> maybe_istream =
7347  if (!maybe_istream) return {};
7348 
7349  return GcSafeGetCodeFromInstructionStream(*maybe_istream, inner_pointer);
7350 }
Tagged< GcSafeCode > GcSafeGetCodeFromInstructionStream(Tagged< HeapObject > instruction_stream, Address inner_pointer)
Definition: heap.cc:7298
std::optional< Tagged< InstructionStream > > GcSafeTryFindInstructionStreamForInnerPointer(Address inner_pointer)
Definition: heap.cc:7327

References code, GcSafeGetCodeFromInstructionStream(), GcSafeTryFindInstructionStreamForInnerPointer(), v8::internal::Builtins::IsBuiltinId(), isolate(), and v8::internal::OffHeapInstructionStream::TryLookupCode().

Referenced by GcSafeFindCodeForInnerPointer(), v8::internal::ETWJITInterface::EtwIsolateOperations::HeapGcSafeTryFindCodeForInnerPointer(), TryFindCodeForInnerPointerForPrinting(), v8::internal::TurbofanStubWithContextFrame::unchecked_code(), and v8::internal::StubFrame::unchecked_code().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GcSafeTryFindInstructionStreamForInnerPointer()

std::optional< Tagged< InstructionStream > > v8::internal::Heap::GcSafeTryFindInstructionStreamForInnerPointer ( Address  inner_pointer)

Definition at line 7327 of file heap.cc.

7327  {
7328  std::optional<Address> start =
7330  if (start.has_value()) {
7331  return UncheckedCast<InstructionStream>(HeapObject::FromAddress(*start));
7332  }
7333 
7334  return {};
7335 }
static Tagged< HeapObject > FromAddress(Address address)
Definition: heap-object.h:226
static std::optional< Address > StartOfJitAllocationAt(Address inner_pointer)

References v8::internal::HeapObject::FromAddress(), and v8::internal::ThreadIsolation::StartOfJitAllocationAt().

Referenced by GcSafeTryFindCodeForInnerPointer().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GenerationSizesFromHeapSize()

void v8::internal::Heap::GenerationSizesFromHeapSize ( size_t  heap_size,
size_t young_generation_size,
size_t old_generation_size 
)
static

Definition at line 244 of file heap.cc.

246  {
247  // Initialize values for the case when the given heap size is too small.
248  *young_generation_size = 0;
249  *old_generation_size = 0;
250  // Binary search for the largest old generation size that fits to the given
251  // heap limit considering the correspondingly sized young generation.
252  size_t lower = 0, upper = heap_size;
253  while (lower + 1 < upper) {
254  size_t old_generation = lower + (upper - lower) / 2;
255  size_t young_generation =
257  if (old_generation + young_generation <= heap_size) {
258  // This size configuration fits into the given heap limit.
259  *young_generation_size = young_generation;
260  *old_generation_size = old_generation;
261  lower = old_generation;
262  } else {
263  upper = old_generation;
264  }
265  }
266 }
static V8_EXPORT_PRIVATE size_t YoungGenerationSizeFromOldGenerationSize(size_t old_generation_size)
Definition: heap.cc:209

References YoungGenerationSizeFromOldGenerationSize().

Referenced by v8::ResourceConstraints::ConfigureDefaults(), v8::ResourceConstraints::ConfigureDefaultsFromHeapSize(), and ConfigureHeap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GetCodeRangeReservedAreaSize()

size_t v8::internal::Heap::GetCodeRangeReservedAreaSize ( )
static

Definition at line 3153 of file heap.cc.

3153  {
3155 }
static size_t GetWritableReservedAreaSize()
Definition: code-range.cc:63

References v8::internal::CodeRange::GetWritableReservedAreaSize().

Referenced by v8::internal::wasm::WasmCodeAllocator::InitializeCodeRange(), and v8::internal::wasm::anonymous_namespace{wasm-code-manager.cc}::OverheadPerCodeSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GetEmbedderRootsHandler()

EmbedderRootsHandler * v8::internal::Heap::GetEmbedderRootsHandler ( ) const

Definition at line 6230 of file heap.cc.

6230  {
6231  return embedder_roots_handler_;
6232 }
EmbedderRootsHandler * embedder_roots_handler_
Definition: heap.h:2371

References embedder_roots_handler_.

Referenced by v8::internal::TracedHandles::SupportsClearingWeakNonLiveWrappers().

+ Here is the caller graph for this function:

◆ GetFillToAlign()

int v8::internal::Heap::GetFillToAlign ( Address  address,
AllocationAlignment  alignment 
)
static

Definition at line 3143 of file heap.cc.

3143  {
3144  if (V8_COMPRESS_POINTERS_8GB_BOOL) return 0;
3145  if (alignment == kDoubleAligned && (address & kDoubleAlignmentMask) != 0)
3146  return kTaggedSize;
3147  if (alignment == kDoubleUnaligned && (address & kDoubleAlignmentMask) == 0) {
3148  return kDoubleSize - kTaggedSize; // No fill if double is always aligned.
3149  }
3150  return 0;
3151 }
#define V8_COMPRESS_POINTERS_8GB_BOOL
Definition: globals.h:599
@ kDoubleAligned
Definition: globals.h:1568
@ kDoubleUnaligned
Definition: globals.h:1570
constexpr int kDoubleSize
Definition: globals.h:408
constexpr intptr_t kDoubleAlignmentMask
Definition: globals.h:944

References v8::internal::kDoubleAligned, v8::internal::kDoubleAlignmentMask, v8::internal::kDoubleSize, v8::internal::kDoubleUnaligned, v8::internal::kTaggedSize, and V8_COMPRESS_POINTERS_8GB_BOOL.

Referenced by v8::internal::SemiSpaceNewSpace::AddParkedAllocationBuffer(), v8::internal::MainAllocator::AlignTopForTesting(), AlignWithFillerBackground(), v8::internal::SemiSpaceNewSpace::Allocate(), v8::internal::MainAllocator::AllocateFastAligned(), v8::internal::HeapObject::CheckRequiredAlignment(), v8::internal::SemiSpaceNewSpaceAllocatorPolicy::EnsureAllocation(), and v8::internal::ReadOnlySpace::TryAllocateLinearlyAligned().

+ Here is the caller graph for this function:

◆ GetForegroundTaskRunner()

std::shared_ptr< v8::TaskRunner > v8::internal::Heap::GetForegroundTaskRunner ( TaskPriority  priority = TaskPriority::kUserBlocking) const

Definition at line 6101 of file heap.cc.

6102  {
6104  reinterpret_cast<v8::Isolate*>(isolate()), priority);
6105 }
std::shared_ptr< v8::TaskRunner > GetForegroundTaskRunner(Isolate *isolate)
Returns a TaskRunner which can be used to post a task on the foreground.
Definition: v8-platform.h:1141
static V8_EXPORT_PRIVATE v8::Platform * GetCurrentPlatform()
Definition: v8.cc:282

References v8::internal::V8::GetCurrentPlatform(), v8::Platform::GetForegroundTaskRunner(), and isolate().

Referenced by v8::internal::IsolateSafepoint::InitiateGlobalSafepointScopeRaw(), v8::internal::MemoryBalancer::PostHeartbeatTask(), and v8::internal::MinorGCJob::TryScheduleTask().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GetFromRingBuffer()

void v8::internal::Heap::GetFromRingBuffer ( char *  buffer)
private

Definition at line 5311 of file heap.cc.

5311  {
5312  size_t copied = 0;
5313  if (ring_buffer_full_) {
5315  memcpy(buffer, trace_ring_buffer_ + ring_buffer_end_, copied);
5316  }
5317  memcpy(buffer + copied, trace_ring_buffer_, ring_buffer_end_);
5318 }

References kTraceRingBufferSize, ring_buffer_end_, ring_buffer_full_, and trace_ring_buffer_.

Referenced by RecordStats().

+ Here is the caller graph for this function:

◆ GetMaximumFillToAlign()

int v8::internal::Heap::GetMaximumFillToAlign ( AllocationAlignment  alignment)
static

Definition at line 3129 of file heap.cc.

3129  {
3130  if (V8_COMPRESS_POINTERS_8GB_BOOL) return 0;
3131  switch (alignment) {
3132  case kTaggedAligned:
3133  return 0;
3134  case kDoubleAligned:
3135  case kDoubleUnaligned:
3136  return kDoubleSize - kTaggedSize;
3137  default:
3138  UNREACHABLE();
3139  }
3140 }

References v8::internal::kDoubleAligned, v8::internal::kDoubleSize, v8::internal::kDoubleUnaligned, v8::internal::kTaggedAligned, v8::internal::kTaggedSize, v8::internal::UNREACHABLE(), and V8_COMPRESS_POINTERS_8GB_BOOL.

Referenced by v8::internal::ReadOnlySpace::AllocateRawAligned(), v8::internal::MainAllocator::AllocateRawSlowAligned(), and v8::internal::PagedSpaceAllocatorPolicy::EnsureAllocation().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GetNextTemplateSerialNumber()

uint32_t v8::internal::Heap::GetNextTemplateSerialNumber ( )
inline

Definition at line 386 of file heap-inl.h.

386  {
387  uint32_t next_serial_number =
388  static_cast<uint32_t>(next_template_serial_number().value());
389  if (next_serial_number < Smi::kMaxValue) {
390  ++next_serial_number;
391  } else {
392  // In case of overflow, restart from a range where it's ok for serial
393  // numbers to be non-unique.
394  next_serial_number = TemplateInfo::kFirstNonUniqueSerialNumber;
395  }
397  set_next_template_serial_number(Smi::FromInt(next_serial_number));
398  return next_serial_number;
399 }
static constexpr int kMaxValue
Definition: smi.h:101
static const int kFirstNonUniqueSerialNumber
Definition: templates.h:44

References DCHECK_NE, v8::internal::Smi::FromInt(), v8::internal::TemplateInfo::kFirstNonUniqueSerialNumber, v8::internal::Smi::kMaxValue, and v8::internal::TemplateInfo::kUninitializedSerialNumber.

Referenced by v8::internal::TemplateInfo::EnsureHasSerialNumber().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GetObjectTypeName()

bool v8::internal::Heap::GetObjectTypeName ( size_t  index,
const char **  object_type,
const char **  object_sub_type 
)

Definition at line 7167 of file heap.cc.

7168  {
7169  if (index >= ObjectStats::OBJECT_STATS_COUNT) return false;
7170 
7171  switch (static_cast<int>(index)) {
7172 #define COMPARE_AND_RETURN_NAME(name) \
7173  case name: \
7174  *object_type = #name; \
7175  *object_sub_type = ""; \
7176  return true;
7178 #undef COMPARE_AND_RETURN_NAME
7179 
7180 #define COMPARE_AND_RETURN_NAME(name) \
7181  case ObjectStats::FIRST_VIRTUAL_TYPE + \
7182  static_cast<int>(ObjectStats::VirtualInstanceType::name): \
7183  *object_type = #name; \
7184  *object_sub_type = ""; \
7185  return true;
7187 #undef COMPARE_AND_RETURN_NAME
7188  }
7189  return false;
7190 }
static constexpr int OBJECT_STATS_COUNT
Definition: object-stats.h:112
#define COMPARE_AND_RETURN_NAME(name)
#define VIRTUAL_INSTANCE_TYPE_LIST(V)
Definition: object-stats.h:17
#define INSTANCE_TYPE_LIST(V)

References COMPARE_AND_RETURN_NAME, v8::internal::index, INSTANCE_TYPE_LIST, v8::internal::ObjectStats::OBJECT_STATS_COUNT, and VIRTUAL_INSTANCE_TYPE_LIST.

◆ GetRandomMmapAddr()

void* v8::internal::Heap::GetRandomMmapAddr ( )
inline

Definition at line 1631 of file heap.h.

1631  {
1633 #if V8_TARGET_ARCH_X64
1634 #if V8_OS_DARWIN
1635  // The Darwin kernel [as of macOS 10.12.5] does not clean up page
1636  // directory entries [PDE] created from mmap or mach_vm_allocate, even
1637  // after the region is destroyed. Using a virtual address space that is
1638  // too large causes a leak of about 1 wired [can never be paged out] page
1639  // per call to mmap(). The page is only reclaimed when the process is
1640  // killed. Confine the hint to a 32-bit section of the virtual address
1641  // space. See crbug.com/700928.
1642  uintptr_t offset = reinterpret_cast<uintptr_t>(result) & kMmapRegionMask;
1643  result = reinterpret_cast<void*>(mmap_region_base_ + offset);
1644 #endif // V8_OS_DARWIN
1645 #endif // V8_TARGET_ARCH_X64
1646  return result;
1647  }
uintptr_t mmap_region_base_
Definition: heap.h:2282
static const uintptr_t kMmapRegionMask
Definition: heap.h:2281
void * GetRandomMmapAddr()
Definition: allocation.cc:161

References v8::internal::GetRandomMmapAddr(), and v8::base::internal::result.

Referenced by v8::internal::MemoryAllocator::AllocateUninitializedChunkAt(), and v8::internal::OffHeapInstructionStream::CreateOffHeapOffHeapInstructionStream().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ global_allocation_limit()

size_t v8::internal::Heap::global_allocation_limit ( ) const
inlineprivate

Definition at line 1987 of file heap.h.

1987  {
1988  return global_allocation_limit_.load(std::memory_order_relaxed);
1989  }
std::atomic< size_t > global_allocation_limit_
Definition: heap.h:2293

Referenced by AllocationLimitOvershotByLargeMargin(), EnsureMinimumRemainingAllocationLimit(), GlobalMemoryAvailable(), PercentToGlobalMemoryLimit(), v8::internal::GCTracer::PrintNVP(), RecomputeLimits(), RecomputeLimitsAfterLoadingIfNeeded(), ShrinkOldGenerationAllocationLimitIfNotConfigured(), and v8::internal::IncrementalMarking::Start().

+ Here is the caller graph for this function:

◆ GlobalConsumedBytes()

size_t v8::internal::Heap::GlobalConsumedBytes ( ) const

Definition at line 5431 of file heap.cc.

5431  {
5433 }
V8_EXPORT_PRIVATE size_t GlobalWastedBytes() const
Definition: heap.cc:5429
V8_EXPORT_PRIVATE size_t GlobalSizeOfObjects() const
Definition: heap.cc:5423

References GlobalSizeOfObjects(), and GlobalWastedBytes().

Referenced by AllocationLimitOvershotByLargeMargin(), EnsureMinimumRemainingAllocationLimit(), GlobalMemoryAvailable(), PercentToGlobalMemoryLimit(), v8::internal::GCTracer::RecordGCSizeCounters(), and ShrinkOldGenerationAllocationLimitIfNotConfigured().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GlobalConsumedBytesAtLastGC()

size_t v8::internal::Heap::GlobalConsumedBytesAtLastGC ( ) const

Definition at line 5439 of file heap.cc.

5439  {
5441  (v8_flags.external_memory_accounted_in_global_limit
5443  : 0);
5444 }
size_t embedder_size_at_last_gc_
Definition: heap.h:2401
V8_EXPORT_PRIVATE size_t OldGenerationConsumedBytesAtLastGC() const
Definition: heap.cc:5435

References embedder_size_at_last_gc_, external_memory_, v8::internal::Heap::ExternalMemoryAccounting::low_since_mark_compact(), OldGenerationConsumedBytesAtLastGC(), and v8::internal::v8_flags.

Referenced by PercentToGlobalMemoryLimit().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GlobalMemoryAvailable()

size_t v8::internal::Heap::GlobalMemoryAvailable ( )
private

Definition at line 5594 of file heap.cc.

5594  {
5595  size_t global_size = GlobalConsumedBytes();
5596  size_t global_limit = global_allocation_limit();
5597 
5598  if (global_size < global_limit) {
5599  return global_limit - global_size;
5600  } else {
5601  return 0;
5602  }
5603 }

References global_allocation_limit(), and GlobalConsumedBytes().

Referenced by IncrementalMarkingLimitReached(), and RecomputeLimitsAfterLoadingIfNeeded().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GlobalSizeOfObjects()

size_t v8::internal::Heap::GlobalSizeOfObjects ( ) const

Definition at line 5423 of file heap.cc.

5423  {
5425  (v8_flags.external_memory_accounted_in_global_limit ? external_memory()
5426  : 0);
5427 }
V8_EXPORT_PRIVATE size_t EmbedderSizeOfObjects() const
Definition: heap.cc:5419

References EmbedderSizeOfObjects(), external_memory(), OldGenerationSizeOfObjects(), and v8::internal::v8_flags.

Referenced by GlobalConsumedBytes(), and v8::internal::IncrementalMarking::Start().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GlobalWastedBytes()

size_t v8::internal::Heap::GlobalWastedBytes ( ) const

Definition at line 5429 of file heap.cc.

5429 { return OldGenerationWastedBytes(); }
V8_EXPORT_PRIVATE size_t OldGenerationWastedBytes() const
Definition: heap.cc:5371

References OldGenerationWastedBytes().

Referenced by GlobalConsumedBytes(), and v8::internal::IncrementalMarking::Start().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ HandleExternalMemoryInterrupt()

void v8::internal::Heap::HandleExternalMemoryInterrupt ( )

Definition at line 1453 of file heap.cc.

1453  {
1454  const GCCallbackFlags kGCCallbackFlagsForExternalMemory =
1455  static_cast<GCCallbackFlags>(
1458  uint64_t current = external_memory();
1459  if (current > external_memory_hard_limit()) {
1460  TRACE_EVENT2("devtools.timeline,v8", "V8.ExternalMemoryPressure",
1461  "external_memory_mb", static_cast<int>((current) / MB),
1462  "external_memory_hard_limit_mb",
1463  static_cast<int>((external_memory_hard_limit()) / MB));
1468  kGCCallbackFlagsForExternalMemory));
1469  return;
1470  }
1471  if (v8_flags.external_memory_accounted_in_global_limit) {
1472  // Under `external_memory_accounted_in_global_limit`, external interrupt
1473  // only triggers a check to allocation limits.
1477  kGCCallbackFlagsForExternalMemory);
1478  return;
1479  }
1480  uint64_t soft_limit = external_memory_.soft_limit();
1481  if (current <= soft_limit) {
1482  return;
1483  }
1484  TRACE_EVENT2("devtools.timeline,v8", "V8.ExternalMemoryPressure",
1485  "external_memory_mb", static_cast<int>((current) / MB),
1486  "external_memory_soft_limit_mb",
1487  static_cast<int>((soft_limit) / MB));
1488  if (incremental_marking()->IsStopped()) {
1489  if (incremental_marking()->CanAndShouldBeStarted()) {
1492  kGCCallbackFlagsForExternalMemory);
1493  } else {
1496  kGCCallbackFlagsForExternalMemory);
1497  }
1498  } else {
1499  // Incremental marking is turned on and has already been started.
1501  current_gc_callback_flags_ | kGCCallbackFlagsForExternalMemory);
1503  }
1504 }
void UpdateLimitForInterrupt(uint64_t amount)
Definition: heap.h:258
uint64_t external_memory_hard_limit()
Definition: heap.h:655
GCCallbackFlags
GCCallbackFlags is used to convey additional information about the GC callback.
Definition: v8-callbacks.h:179
@ kGCCallbackFlagCollectAllExternalMemory
Definition: v8-callbacks.h:185
@ kGCCallbackFlagSynchronousPhantomCallbackProcessing
Definition: v8-callbacks.h:183
#define TRACE_EVENT2(category_group, name, arg1_name, arg1_val, arg2_name, arg2_val)

References v8::internal::IncrementalMarking::AdvanceAndFinalizeIfNecessary(), CollectAllGarbage(), current_gc_callback_flags_, external_memory(), external_memory_, external_memory_hard_limit(), GCFlagsForIncrementalMarking(), incremental_marking(), v8::internal::kExternalMemoryPressure, v8::kGCCallbackFlagCollectAllAvailableGarbage, v8::kGCCallbackFlagCollectAllExternalMemory, v8::kGCCallbackFlagSynchronousPhantomCallbackProcessing, v8::internal::kNoFlags, v8::internal::kReduceMemoryFootprint, main_thread_local_heap(), v8::internal::MB, v8::internal::Heap::ExternalMemoryAccounting::soft_limit(), StartIncrementalMarking(), StartIncrementalMarkingIfAllocationLimitIsReached(), TRACE_EVENT2, v8::internal::Heap::ExternalMemoryAccounting::UpdateLimitForInterrupt(), and v8::internal::v8_flags.

+ Here is the call graph for this function:

◆ HandleGCRequest()

void v8::internal::Heap::HandleGCRequest ( )

Definition at line 1205 of file heap.cc.

1205  {
1209  } else if (HighMemoryPressure()) {
1211  } else if (CollectionRequested()) {
1213  } else if (incremental_marking()->MajorCollectionRequested()) {
1217  } else if (minor_mark_sweep_collector()->gc_finalization_requsted()) {
1220  }
1221 }
void CheckCollectionRequested()
Definition: heap.cc:2239
void CheckMemoryPressure()
Definition: heap.cc:4273
MinorMarkSweepCollector * minor_mark_sweep_collector()
Definition: heap.h:860
bool IsStressingScavenge()
Definition: heap.cc:7385
StressScavengeObserver * stress_scavenge_observer_
Definition: heap.h:2265

References CheckCollectionRequested(), CheckMemoryPressure(), CollectAllGarbage(), CollectGarbage(), CollectionRequested(), current_gc_callback_flags_, current_gc_flags_, v8::internal::StressScavengeObserver::HasRequestedGC(), HighMemoryPressure(), incremental_marking(), IsStressingScavenge(), v8::internal::kFinalizeConcurrentMinorMS, v8::internal::kFinalizeMarkingViaStackGuard, v8::internal::kTesting, minor_mark_sweep_collector(), v8::internal::NEW_SPACE, v8::internal::StressScavengeObserver::RequestedGCDone(), and stress_scavenge_observer_.

Referenced by v8::internal::StackGuard::HandleInterrupts().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ has_heap_object_allocation_tracker()

bool v8::internal::Heap::has_heap_object_allocation_tracker ( ) const
inline

Definition at line 1562 of file heap.h.

1562  {
1563  return !allocation_trackers_.empty();
1564  }

Referenced by v8::internal::Isolate::UpdateLogObjectRelocation().

+ Here is the caller graph for this function:

◆ HasBeenSetUp()

bool v8::internal::Heap::HasBeenSetUp ( ) const

Definition at line 449 of file heap.cc.

449  {
450  // We will always have an old space when the heap is set up.
451  return old_space_ != nullptr;
452 }

References old_space_.

Referenced by Available(), Capacity(), CommittedMemory(), CommittedMemoryExecutable(), CommittedOldGenerationMemory(), CommittedPhysicalMemory(), Contains(), ContainsCode(), v8::internal::Isolate::Init(), InSpace(), InSpaceSlow(), OldGenerationCapacity(), v8::internal::ProfilingScope::ProfilingScope(), and UpdateMaximumCommitted().

+ Here is the caller graph for this function:

◆ HasDirtyJSFinalizationRegistries()

bool v8::internal::Heap::HasDirtyJSFinalizationRegistries ( )

Definition at line 7035 of file heap.cc.

7035  {
7036  return !IsUndefined(dirty_js_finalization_registries_list(), isolate());
7037 }

References dirty_js_finalization_registries_list(), and isolate().

Referenced by DequeueDirtyJSFinalizationRegistry(), EnqueueDirtyJSFinalizationRegistry(), and PostFinalizationRegistryCleanupTaskIfNeeded().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ HasHighFragmentation()

bool v8::internal::Heap::HasHighFragmentation ( )

Definition at line 3883 of file heap.cc.

3883  {
3884  const size_t used = OldGenerationSizeOfObjects();
3885  const size_t committed = CommittedOldGenerationMemory();
3886 
3887  // Background thread allocation could result in committed memory being less
3888  // than used memory in some situations.
3889  if (committed < used) return false;
3890 
3891  constexpr size_t kSlack = 16 * MB;
3892 
3893  // Fragmentation is high if committed > 2 * used + kSlack.
3894  // Rewrite the expression to avoid overflow.
3895  return committed - used > used + kSlack;
3896 }

References CommittedOldGenerationMemory(), v8::internal::MB, and OldGenerationSizeOfObjects().

Referenced by v8::internal::MemoryReducer::NotifyMarkCompact().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ HasLowAllocationRate()

bool v8::internal::Heap::HasLowAllocationRate ( )

Definition at line 3843 of file heap.cc.

3843  {
3846 }
bool HasLowEmbedderAllocationRate()
Definition: heap.cc:3835
bool HasLowYoungGenerationAllocationRate()
Definition: heap.cc:3816
bool HasLowOldGenerationAllocationRate()
Definition: heap.cc:3826

References HasLowEmbedderAllocationRate(), HasLowOldGenerationAllocationRate(), and HasLowYoungGenerationAllocationRate().

+ Here is the call graph for this function:

◆ HasLowEmbedderAllocationRate()

bool v8::internal::Heap::HasLowEmbedderAllocationRate ( )
private

Definition at line 3835 of file heap.cc.

3835  {
3836  double mu = ComputeMutatorUtilization(
3837  "Embedder", tracer()->EmbedderAllocationThroughputInBytesPerMillisecond(),
3838  tracer()->EmbedderSpeedInBytesPerMillisecond());
3839  const double kHighMutatorUtilization = 0.993;
3840  return mu > kHighMutatorUtilization;
3841 }
double ComputeMutatorUtilization(const char *tag, double mutator_speed, std::optional< double > gc_speed)
Definition: heap.cc:3804

References ComputeMutatorUtilization(), v8::internal::mu, and tracer().

Referenced by HasLowAllocationRate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ HasLowOldGenerationAllocationRate()

bool v8::internal::Heap::HasLowOldGenerationAllocationRate ( )
private

Definition at line 3826 of file heap.cc.

3826  {
3827  double mu = ComputeMutatorUtilization(
3828  "Old generation",
3829  tracer()->OldGenerationAllocationThroughputInBytesPerMillisecond(),
3830  tracer()->OldGenerationSpeedInBytesPerMillisecond());
3831  const double kHighMutatorUtilization = 0.993;
3832  return mu > kHighMutatorUtilization;
3833 }

References ComputeMutatorUtilization(), v8::internal::mu, and tracer().

Referenced by HasLowAllocationRate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ HasLowYoungGenerationAllocationRate()

bool v8::internal::Heap::HasLowYoungGenerationAllocationRate ( )
private

Definition at line 3816 of file heap.cc.

3816  {
3817  double mu = ComputeMutatorUtilization(
3818  "Young generation",
3819  tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond(),
3820  tracer()->YoungGenerationSpeedInBytesPerMillisecond(
3822  constexpr double kHighMutatorUtilization = 0.993;
3823  return mu > kHighMutatorUtilization;
3824 }
@ kOnlyAtomicPause
Definition: gc-tracer.h:26

References ComputeMutatorUtilization(), v8::internal::kOnlyAtomicPause, v8::internal::mu, and tracer().

Referenced by HasLowAllocationRate(), and RecomputeLimits().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ heap_expansion_mutex()

base::Mutex* v8::internal::Heap::heap_expansion_mutex ( )
inline

Definition at line 1322 of file heap.h.

1322 { return &heap_expansion_mutex_; }
base::Mutex heap_expansion_mutex_
Definition: heap.h:2380

Referenced by v8::internal::LargeObjectSpace::AllocateLargePage().

+ Here is the caller graph for this function:

◆ heap_profiler()

HeapProfiler* v8::internal::Heap::heap_profiler ( ) const
inline

Definition at line 408 of file heap.h.

408 { return heap_profiler_.get(); }

Referenced by v8::internal::CppHeap::AttachIsolate(), CanMoveObjectStart(), CheckHeapLimitReached(), CheckIneffectiveMarkCompact(), CollectAllAvailableGarbage(), CollectGarbage(), v8::internal::Isolate::Deinit(), v8::HeapSnapshot::Delete(), v8::internal::Isolate::Init(), OnMoveEvent(), v8::internal::RUNTIME_FUNCTION(), and v8::internal::Isolate::UpdateLogObjectRelocation().

+ Here is the caller graph for this function:

◆ HeapLimitMultiplier()

size_t v8::internal::Heap::HeapLimitMultiplier ( )
static

Definition at line 5031 of file heap.cc.

5031  {
5032 #if V8_OS_ANDROID
5033  // Don't apply pointer multiplier on Android since it has no swap space and
 5034  // should instead adapt its heap size based on available physical memory.
5035  if (!v8_flags.high_end_android) {
5036  return 1;
5037  }
5038 #endif
5039 
5040  // The heap limit needs to be computed based on the system pointer size
5041  // because we want a pointer-compressed heap to have larger limit than
 5042  // an ordinary 32-bit heap, which is constrained by the 2GB virtual address space.
5043  return kSystemPointerSize / 4;
5044 }

References v8::internal::kSystemPointerSize, and v8::internal::v8_flags.

Referenced by DefaulMaxHeapSize(), DefaulMinHeapSize(), DefaultInitialOldGenerationSize(), HeapSizeFromPhysicalMemory(), OldGenerationLowMemory(), and OldGenerationToSemiSpaceRatioLowMemory().

+ Here is the caller graph for this function:

◆ HeapSizeFromPhysicalMemory()

size_t v8::internal::Heap::HeapSizeFromPhysicalMemory ( uint64_t  physical_memory)
static

Definition at line 226 of file heap.cc.

226  {
227  // Compute the old generation size and cap it.
228  uint64_t old_generation = physical_memory /
231  old_generation =
232  std::min(old_generation,
233  static_cast<uint64_t>(
234  MaxOldGenerationSizeFromPhysicalMemory(physical_memory)));
235  old_generation =
236  std::max({old_generation, static_cast<uint64_t>(DefaulMinHeapSize())});
237  old_generation = RoundUp(old_generation, PageMetadata::kPageSize);
238 
239  size_t young_generation = YoungGenerationSizeFromOldGenerationSize(
240  static_cast<size_t>(old_generation));
241  return static_cast<size_t>(old_generation) + young_generation;
242 }
static V8_EXPORT_PRIVATE size_t DefaulMinHeapSize()
Definition: heap.cc:5086
static constexpr size_t kPhysicalMemoryToOldGenerationRatio
Definition: heap.h:319
static V8_EXPORT_PRIVATE size_t MaxOldGenerationSizeFromPhysicalMemory(uint64_t physical_memory)
Definition: heap.cc:289

References DefaulMinHeapSize(), HeapLimitMultiplier(), v8::internal::MutablePageMetadata::kPageSize, kPhysicalMemoryToOldGenerationRatio, MaxOldGenerationSizeFromPhysicalMemory(), RoundUp(), and YoungGenerationSizeFromOldGenerationSize().

Referenced by v8::ResourceConstraints::ConfigureDefaults().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ HighMemoryPressure()

bool v8::internal::Heap::HighMemoryPressure ( )
inline

Definition at line 698 of file heap.h.

698  {
699  return memory_pressure_level_.load(std::memory_order_relaxed) !=
701  }

References v8::kNone.

Referenced by CheckMemoryPressure(), v8::internal::anonymous_namespace{compiler.cc}::CompileTurbofan_Concurrent(), HandleGCRequest(), IncrementalMarkingLimitReached(), and ShouldOptimizeForMemoryUsage().

+ Here is the caller graph for this function:

◆ ignore_local_gc_requests()

bool v8::internal::Heap::ignore_local_gc_requests ( ) const
inline

Definition at line 577 of file heap.h.

577  {
579  }
int ignore_local_gc_requests_depth_
Definition: heap.h:2435

◆ incremental_marking()

IncrementalMarking* v8::internal::Heap::incremental_marking ( ) const
inline

Definition at line 1117 of file heap.h.

1117  {
1118  return incremental_marking_.get();
1119  }
std::unique_ptr< IncrementalMarking > incremental_marking_
Definition: heap.h:2337

Referenced by v8::internal::UnifiedHeapMarker::AdvanceMarkingOnAllocationImpl(), AllocateExternalBackingStore(), v8::internal::MemoryAllocator::AllocatePage(), AllocationLimitOvershotByLargeMargin(), AttachCppHeap(), CanMoveObjectStart(), CanShortcutStringsDuringGC(), CheckMemoryPressure(), CollectGarbage(), CollectGarbageOnMemoryPressure(), v8::internal::PageMetadata::ConvertNewToOld(), FinalizeIncrementalMarkingAtomically(), FinalizeIncrementalMarkingAtomicallyIfRunning(), HandleExternalMemoryInterrupt(), HandleGCRequest(), v8::internal::ScavengerCollector::HandleSurvivingNewLargeObjects(), IncrementalMarkingLimitReached(), v8::internal::Isolate::Init(), v8::internal::MainAllocator::IsBlackAllocationEnabled(), IsNewSpaceAllowedToGrowAboveTargetCapacity(), v8::internal::LocalHeap::LocalHeap(), v8::internal::MinorMarkSweepCollector::MarkLiveObjects(), MinorMarkSweep(), NotifyLoadingEnded(), NotifyObjectLayoutChange(), v8::internal::MarkCompactCollector::Prepare(), v8::internal::OldLargeObjectSpace::PromoteNewLargeObject(), v8::internal::Sweeper::RawSweep(), RecomputeLimitsAfterLoadingIfNeeded(), v8::internal::PagedSpaceAllocatorPolicy::RefillLab(), RightTrimArray(), Scavenge(), v8::internal::IncrementalMarkingJob::ScheduleTask(), SelectGarbageCollector(), v8::internal::LocalHeap::SetUpSharedMarking(), ShouldExpandOldGenerationOnSlowAllocation(), ShouldExpandYoungGenerationOnSlowAllocation(), v8::internal::CppHeap::StartDetachingIsolate(), StartIncrementalMarking(), StartIncrementalMarkingIfAllocationLimitIsReached(), StartMinorMSConcurrentMarkingIfNeeded(), v8::internal::IncrementalMarking::Stop(), v8::internal::MarkCompactCollector::TearDown(), v8::internal::MinorMarkSweepCollector::TearDown(), v8::internal::MarkCompactCollector::VerifyMarking(), and WeakenDescriptorArrays().

+ Here is the caller graph for this function:

◆ IncrementalMarkingLimitReached()

Heap::IncrementalMarkingLimit v8::internal::Heap::IncrementalMarkingLimitReached ( )
private

Definition at line 5640 of file heap.cc.

5640  {
5641  // InstructionStream using an AlwaysAllocateScope assumes that the GC state
5642  // does not change; that implies that no marking steps must be performed.
5643  if (!incremental_marking()->CanAndShouldBeStarted() || always_allocate()) {
5644  // Incremental marking is disabled or it is too early to start.
5646  }
5647  if (v8_flags.stress_incremental_marking) {
5649  }
5650  if (incremental_marking()->IsBelowActivationThresholds()) {
5651  // Incremental marking is disabled or it is too early to start.
5653  }
5655  // If there is high memory pressure or stress testing is enabled, then
5656  // start marking immediately.
5658  }
5659 
5660  if (v8_flags.stress_marking > 0) {
5661  int current_percent = static_cast<int>(
5663  if (current_percent > 0) {
5664  if (v8_flags.trace_stress_marking) {
5665  isolate()->PrintWithTimestamp(
5666  "[IncrementalMarking] %d%% of the memory limit reached\n",
5667  current_percent);
5668  }
5669  if (v8_flags.fuzzer_gc_analysis) {
5670  // Skips values >=100% since they already trigger marking.
5671  if (current_percent < 100) {
5672  double max_marking_limit_reached =
5673  max_marking_limit_reached_.load(std::memory_order_relaxed);
5674  while (current_percent > max_marking_limit_reached) {
5675  max_marking_limit_reached_.compare_exchange_weak(
5676  max_marking_limit_reached, current_percent,
5677  std::memory_order_relaxed);
5678  }
5679  }
5680  } else if (current_percent >= stress_marking_percentage_) {
5682  }
5683  }
5684  }
5685 
5686  if (v8_flags.incremental_marking_soft_trigger > 0 ||
5687  v8_flags.incremental_marking_hard_trigger > 0) {
5688  int current_percent = static_cast<int>(
5690  if (current_percent > v8_flags.incremental_marking_hard_trigger &&
5691  v8_flags.incremental_marking_hard_trigger > 0) {
5693  }
5694  if (current_percent > v8_flags.incremental_marking_soft_trigger &&
5695  v8_flags.incremental_marking_soft_trigger > 0) {
5697  }
5699  }
5700 
5702  size_t old_generation_space_available = OldGenerationSpaceAvailable();
5703  size_t global_memory_available = GlobalMemoryAvailable();
5704 
5705  if (old_generation_space_available > NewSpaceTargetCapacity() &&
5706  (global_memory_available > NewSpaceTargetCapacity())) {
5707  if (cpp_heap() && gc_count_ == 0 && using_initial_limit()) {
5708  // At this point the embedder memory is above the activation
5709  // threshold. No GC happened so far and it's thus unlikely to get a
5710  // configured heap any time soon. Start a memory reducer in this case
5711  // which will wait until the allocation rate is low to trigger garbage
5712  // collection.
5714  }
5716  }
5719  }
5720  if (ShouldOptimizeForLoadTime()) {
5722  }
5723  if (old_generation_space_available == 0) {
5725  }
5726  if (global_memory_available == 0) {
5728  }
5730 }
void RecordGCSizeCounters() const
Definition: gc-tracer.cc:1526
double PercentToOldGenerationLimit() const
Definition: heap.cc:5622
size_t GlobalMemoryAvailable()
Definition: heap.cc:5594
bool ShouldStressCompaction() const
Definition: heap.cc:5732
size_t NewSpaceTargetCapacity() const
Definition: heap.cc:4050
size_t OldGenerationSpaceAvailable()
Definition: heap.h:1941
double PercentToGlobalMemoryLimit() const
Definition: heap.cc:5628
std::atomic< double > max_marking_limit_reached_
Definition: heap.h:2269

References always_allocate(), cpp_heap(), gc_count_, GlobalMemoryAvailable(), HighMemoryPressure(), incremental_marking(), isolate(), kFallbackForEmbedderLimit, kHardLimit, kNoLimit, kSoftLimit, max_marking_limit_reached_, NewSpaceTargetCapacity(), OldGenerationSpaceAvailable(), PercentToGlobalMemoryLimit(), PercentToOldGenerationLimit(), v8::internal::GCTracer::RecordGCSizeCounters(), ShouldOptimizeForLoadTime(), ShouldOptimizeForMemoryUsage(), ShouldStressCompaction(), stress_marking_percentage_, tracer(), using_initial_limit(), and v8::internal::v8_flags.

Referenced by ShouldExpandOldGenerationOnSlowAllocation(), and StartIncrementalMarkingIfAllocationLimitIsReached().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IncrementDeferredCounts()

void v8::internal::Heap::IncrementDeferredCounts ( base::Vector< const v8::Isolate::UseCounterFeature features)

Definition at line 944 of file heap.cc.

945  {
946  deferred_counters_.insert(deferred_counters_.end(), features.begin(),
947  features.end());
948 }
T * insert(T *pos, const T &value)
Definition: small-vector.h:199
base::SmallVector< v8::Isolate::UseCounterFeature, 8 > deferred_counters_
Definition: heap.h:2311

References v8::base::Vector< T >::begin(), deferred_counters_, v8::base::SmallVector< T, kSize, Allocator >::end(), v8::base::Vector< T >::end(), and v8::base::SmallVector< T, kSize, Allocator >::insert().

Referenced by v8::internal::Isolate::CountUsage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IncrementExternalBackingStoreBytes()

void v8::internal::Heap::IncrementExternalBackingStoreBytes ( ExternalBackingStoreType  type,
size_t  amount 
)
inlineprivate

Definition at line 401 of file heap-inl.h.

402  {
403  base::CheckedIncrement(&backing_store_bytes_, static_cast<uint64_t>(amount),
404  std::memory_order_relaxed);
405  // TODO(mlippautz): Implement interrupt for global memory allocations that can
406  // trigger garbage collections.
407 }
void CheckedIncrement(std::atomic< T > *number, T amount, std::memory_order order=std::memory_order_seq_cst) requires std
Definition: atomic-utils.h:268

References backing_store_bytes_, and v8::base::CheckedIncrement().

Referenced by v8::internal::Space::IncrementExternalBackingStoreBytes(), and v8::internal::ArrayBufferSweeper::IncrementExternalMemoryCounters().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IncrementNewSpaceSurvivingObjectSize()

void v8::internal::Heap::IncrementNewSpaceSurvivingObjectSize ( size_t  object_size)
inline

Definition at line 1364 of file heap.h.

1364  {
1365  new_space_surviving_object_size_ += object_size;
1366  }

Referenced by v8::internal::Sweeper::AddNewSpacePage().

+ Here is the caller graph for this function:

◆ IncrementNodesCopiedInNewSpace()

void v8::internal::Heap::IncrementNodesCopiedInNewSpace ( )
inline

Definition at line 1379 of file heap.h.

Referenced by v8::internal::anonymous_namespace{global-handles.cc}::UpdateListOfYoungNodesImpl().

+ Here is the caller graph for this function:

◆ IncrementNodesDiedInNewSpace()

void v8::internal::Heap::IncrementNodesDiedInNewSpace ( int  count)
inline

Definition at line 1375 of file heap.h.

1375  {
1376  nodes_died_in_new_space_ += count;
1377  }

Referenced by v8::internal::anonymous_namespace{global-handles.cc}::ClearListOfYoungNodesImpl(), and v8::internal::anonymous_namespace{global-handles.cc}::UpdateListOfYoungNodesImpl().

+ Here is the caller graph for this function:

◆ IncrementNodesPromoted()

void v8::internal::Heap::IncrementNodesPromoted ( )
inline

Definition at line 1381 of file heap.h.

1381 { nodes_promoted_++; }

Referenced by v8::internal::anonymous_namespace{global-handles.cc}::UpdateListOfYoungNodesImpl().

+ Here is the caller graph for this function:

◆ IncrementPromotedObjectsSize()

void v8::internal::Heap::IncrementPromotedObjectsSize ( size_t  object_size)
inline

Definition at line 1359 of file heap.h.

1359  {
1360  promoted_objects_size_ += object_size;
1361  }

Referenced by v8::internal::Sweeper::AddPromotedPage().

+ Here is the caller graph for this function:

◆ IncrementYoungSurvivorsCounter()

void v8::internal::Heap::IncrementYoungSurvivorsCounter ( size_t  survived)
inline

Definition at line 1383 of file heap.h.

1383  {
1384  survived_since_last_expansion_ += survived;
1385  }
size_t survived_since_last_expansion_
Definition: heap.h:2190

Referenced by v8::internal::Sweeper::AddNewSpacePage(), and v8::internal::Sweeper::AddPromotedPage().

+ Here is the caller graph for this function:

◆ InFromPage() [1/3]

bool v8::internal::Heap::InFromPage ( Tagged< HeapObject >  heap_object)
inlinestatic

Definition at line 254 of file heap-inl.h.

254  {
255  return MemoryChunk::FromHeapObject(heap_object)->IsFromPage();
256 }

References v8::internal::MemoryChunk::FromHeapObject(), and v8::internal::MemoryChunk::IsFromPage().

+ Here is the call graph for this function:

◆ InFromPage() [2/3]

bool v8::internal::Heap::InFromPage ( Tagged< MaybeObject >  object)
inlinestatic

Definition at line 248 of file heap-inl.h.

248  {
249  Tagged<HeapObject> heap_object;
250  return object.GetHeapObject(&heap_object) && InFromPage(heap_object);
251 }
static bool InFromPage(Tagged< Object > object)
Definition: heap-inl.h:242

References v8::internal::TaggedImpl< kRefType, StorageType >::GetHeapObject(), and InFromPage().

+ Here is the call graph for this function:

◆ InFromPage() [3/3]

bool v8::internal::Heap::InFromPage ( Tagged< Object >  object)
inlinestatic

Definition at line 242 of file heap-inl.h.

242  {
243  DCHECK(!HasWeakHeapObjectTag(object));
244  return IsHeapObject(object) && InFromPage(Cast<HeapObject>(object));
245 }
constexpr bool IsHeapObject(TaggedImpl< kRefType, StorageType > obj)
Definition: objects.h:670
static bool HasWeakHeapObjectTag(const Tagged< Object > value)
Definition: objects.h:654

References v8::internal::DCHECK(), v8::internal::HasWeakHeapObjectTag(), and v8::internal::IsHeapObject().

Referenced by v8::internal::Scavenger::CheckAndScavengeObject(), v8::internal::Scavenger::EvacuateObject(), v8::internal::ForwardingAddress(), v8::internal::anonymous_namespace{scavenger.cc}::ObjectPinningVisitorBase< ConcreteVisitor >::HandleHeapObject(), v8::internal::IterateAndScavengePromotedObjectsVisitor::HandleSlot(), InFromPage(), v8::internal::anonymous_namespace{scavenger.cc}::IsUnscavengedHeapObject(), v8::internal::Scavenger::PromoteObject(), v8::internal::Scavenger::ScavengeObject(), v8::internal::Scavenger::SemiSpaceCopyObject(), v8::internal::StringForwardingTable::Block::UpdateAfterYoungEvacuation(), v8::internal::anonymous_namespace{scavenger.cc}::GlobalHandlesWeakRootsUpdatingVisitor::UpdatePointer(), and UpdateYoungReferenceInExternalStringTableEntry().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InitializeHashSeed()

void v8::internal::Heap::InitializeHashSeed ( )

Definition at line 6087 of file heap.cc.

6087  {
6089  uint64_t new_hash_seed;
6090  if (v8_flags.hash_seed == 0) {
6091  int64_t rnd = isolate()->random_number_generator()->NextInt64();
6092  new_hash_seed = static_cast<uint64_t>(rnd);
6093  } else {
6094  new_hash_seed = static_cast<uint64_t>(v8_flags.hash_seed);
6095  }
6096  Tagged<ByteArray> hash_seed = ReadOnlyRoots(this).hash_seed();
6097  MemCopy(hash_seed->begin(), reinterpret_cast<uint8_t*>(&new_hash_seed),
6098  kInt64Size);
6099 }
int64_t NextInt64() V8_WARN_UNUSED_RESULT
base::RandomNumberGenerator * random_number_generator()
Definition: isolate.cc:6425

References v8::internal::DCHECK(), deserialization_complete_, isolate(), v8::internal::kInt64Size, v8::internal::MemCopy(), v8::base::RandomNumberGenerator::NextInt64(), v8::internal::Isolate::random_number_generator(), ReadOnlyRoots, and v8::internal::v8_flags.

Referenced by CreateImportantReadOnlyObjects(), and v8::internal::ReadOnlyDeserializer::DeserializeIntoIsolate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InitializeMainThreadLocalHeap()

void v8::internal::Heap::InitializeMainThreadLocalHeap ( LocalHeap main_thread_local_heap)

◆ InitializeOncePerProcess()

void v8::internal::Heap::InitializeOncePerProcess ( )
static

Definition at line 6108 of file heap.cc.

6108  {
6109 #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
6110  HeapAllocator::InitializeOncePerProcess();
6111 #endif
6113  if (v8_flags.predictable) {
6115  }
6116 }
static void EnforcePredictableOrder()
Definition: worklist.cc:13

References heap::base::WorklistBase::EnforcePredictableOrder(), v8::internal::MemoryAllocator::InitializeOncePerProcess(), and v8::internal::v8_flags.

Referenced by v8::internal::Isolate::InitializeOncePerProcess().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InitialSemiSpaceSize()

size_t v8::internal::Heap::InitialSemiSpaceSize ( )
inline

Definition at line 1293 of file heap.h.

1293 { return initial_semispace_size_; }

◆ InOldSpace()

bool v8::internal::Heap::InOldSpace ( Tagged< Object >  object)
inline

Definition at line 275 of file heap-inl.h.

275  {
276  return old_space_->Contains(object) &&
277  (!v8_flags.sticky_mark_bits || !HeapLayout::InYoungGeneration(object));
278 }

References v8::internal::PagedSpaceBase::Contains(), v8::internal::HeapLayout::InYoungGeneration(), old_space_, and v8::internal::v8_flags.

Referenced by CompactWeakArrayLists().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InsertIntoRememberedSetFromCode()

int v8::internal::Heap::InsertIntoRememberedSetFromCode ( MutablePageMetadata chunk,
size_t  slot_offset 
)
static

Definition at line 6629 of file heap.cc.

6630  {
6631  // This is called during runtime by a builtin, therefore it is run in the main
6632  // thread.
6633  DCHECK_NULL(LocalHeap::Current());
6634  RememberedSet<OLD_TO_NEW>::Insert<AccessMode::NON_ATOMIC>(chunk, slot_offset);
6635  return 0;
6636 }

References DCHECK_NULL.

◆ InSpace()

bool v8::internal::Heap::InSpace ( Tagged< HeapObject >  value,
AllocationSpace  space 
) const

Definition at line 4534 of file heap.cc.

4534  {
4535  if (memory_allocator()->IsOutsideAllocatedSpace(
4536  value.address(),
4538  return false;
4539  }
4540  if (!HasBeenSetUp()) return false;
4541 
4542  switch (space) {
4543  case NEW_SPACE:
4544  return new_space_->Contains(value);
4545  case OLD_SPACE:
4546  return old_space_->Contains(value);
4547  case CODE_SPACE:
4548  return code_space_->Contains(value);
4549  case SHARED_SPACE:
4550  return shared_space_->Contains(value);
4551  case TRUSTED_SPACE:
4552  return trusted_space_->Contains(value);
4553  case SHARED_TRUSTED_SPACE:
4555  case LO_SPACE:
4556  return lo_space_->Contains(value);
4557  case CODE_LO_SPACE:
4558  return code_lo_space_->Contains(value);
4559  case NEW_LO_SPACE:
4560  return new_lo_space_->Contains(value);
4561  case SHARED_LO_SPACE:
4562  return shared_lo_space_->Contains(value);
4565  case TRUSTED_LO_SPACE:
4566  return trusted_lo_space_->Contains(value);
4567  case RO_SPACE:
4568  return ReadOnlyHeap::Contains(value);
4569  }
4570  UNREACHABLE();
4571 }
@ NOT_EXECUTABLE
Definition: globals.h:1610
constexpr bool IsAnyCodeSpace(AllocationSpace space)
Definition: globals.h:1328

References v8::internal::CODE_LO_SPACE, code_lo_space_, v8::internal::CODE_SPACE, code_space_, v8::internal::PagedSpaceBase::Contains(), v8::internal::ReadOnlyHeap::Contains(), v8::internal::LargeObjectSpace::Contains(), v8::internal::NewSpace::Contains(), v8::internal::EXECUTABLE, HasBeenSetUp(), v8::internal::IsAnyCodeSpace(), v8::internal::LO_SPACE, lo_space_, memory_allocator(), v8::internal::NEW_LO_SPACE, new_lo_space_, v8::internal::NEW_SPACE, new_space_, v8::internal::NOT_EXECUTABLE, v8::internal::OLD_SPACE, old_space_, v8::internal::RO_SPACE, v8::internal::SHARED_LO_SPACE, shared_lo_space_, v8::internal::SHARED_SPACE, shared_space_, v8::internal::SHARED_TRUSTED_LO_SPACE, shared_trusted_lo_space_, v8::internal::SHARED_TRUSTED_SPACE, shared_trusted_space_, space(), v8::internal::TRUSTED_LO_SPACE, trusted_lo_space_, v8::internal::TRUSTED_SPACE, trusted_space_, v8::internal::UNREACHABLE(), and v8::internal::value.

+ Here is the call graph for this function:

◆ InSpaceSlow()

bool v8::internal::Heap::InSpaceSlow ( Address  addr,
AllocationSpace  space 
) const

Definition at line 4573 of file heap.cc.

4573  {
4574  if (memory_allocator()->IsOutsideAllocatedSpace(
4576  return false;
4577  }
4578  if (!HasBeenSetUp()) return false;
4579 
4580  switch (space) {
4581  case NEW_SPACE:
4582  return new_space_->ContainsSlow(addr);
4583  case OLD_SPACE:
4584  return old_space_->ContainsSlow(addr);
4585  case CODE_SPACE:
4586  return code_space_->ContainsSlow(addr);
4587  case SHARED_SPACE:
4588  return shared_space_->ContainsSlow(addr);
4589  case TRUSTED_SPACE:
4590  return trusted_space_->ContainsSlow(addr);
4591  case SHARED_TRUSTED_SPACE:
4592  return shared_trusted_space_->ContainsSlow(addr);
4593  case LO_SPACE:
4594  return lo_space_->ContainsSlow(addr);
4595  case CODE_LO_SPACE:
4596  return code_lo_space_->ContainsSlow(addr);
4597  case NEW_LO_SPACE:
4598  return new_lo_space_->ContainsSlow(addr);
4599  case SHARED_LO_SPACE:
4600  return shared_lo_space_->ContainsSlow(addr);
4602  return shared_trusted_lo_space_->ContainsSlow(addr);
4603  case TRUSTED_LO_SPACE:
4604  return trusted_lo_space_->ContainsSlow(addr);
4605  case RO_SPACE:
4606  return read_only_space_->ContainsSlow(addr);
4607  }
4608  UNREACHABLE();
4609 }
bool ContainsSlow(Address addr) const
virtual bool ContainsSlow(Address a) const =0
bool ContainsSlow(Address addr) const
bool ContainsSlow(Address addr) const

References v8::internal::CODE_LO_SPACE, code_lo_space_, v8::internal::CODE_SPACE, code_space_, v8::internal::NewSpace::ContainsSlow(), v8::internal::LargeObjectSpace::ContainsSlow(), v8::internal::PagedSpaceBase::ContainsSlow(), v8::internal::ReadOnlySpace::ContainsSlow(), v8::internal::EXECUTABLE, HasBeenSetUp(), v8::internal::IsAnyCodeSpace(), v8::internal::LO_SPACE, lo_space_, memory_allocator(), v8::internal::NEW_LO_SPACE, new_lo_space_, v8::internal::NEW_SPACE, new_space_, v8::internal::NOT_EXECUTABLE, v8::internal::OLD_SPACE, old_space_, read_only_space_, v8::internal::RO_SPACE, v8::internal::SHARED_LO_SPACE, shared_lo_space_, v8::internal::SHARED_SPACE, shared_space_, v8::internal::SHARED_TRUSTED_LO_SPACE, shared_trusted_lo_space_, v8::internal::SHARED_TRUSTED_SPACE, shared_trusted_space_, space(), v8::internal::TRUSTED_LO_SPACE, trusted_lo_space_, v8::internal::TRUSTED_SPACE, trusted_space_, and v8::internal::UNREACHABLE().

Referenced by v8::internal::anonymous_namespace{frames.cc}::IsInterpreterFramePc(), and TryFindCodeForInnerPointerForPrinting().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InToPage() [1/3]

bool v8::internal::Heap::InToPage ( Tagged< HeapObject >  heap_object)
inlinestatic

Definition at line 271 of file heap-inl.h.

271  {
272  return MemoryChunk::FromHeapObject(heap_object)->IsToPage();
273 }

References v8::internal::MemoryChunk::FromHeapObject(), and v8::internal::MemoryChunk::IsToPage().

+ Here is the call graph for this function:

◆ InToPage() [2/3]

bool v8::internal::Heap::InToPage ( Tagged< MaybeObject >  object)
inlinestatic

Definition at line 265 of file heap-inl.h.

265  {
266  Tagged<HeapObject> heap_object;
267  return object.GetHeapObject(&heap_object) && InToPage(heap_object);
268 }
static bool InToPage(Tagged< Object > object)
Definition: heap-inl.h:259

References v8::internal::TaggedImpl< kRefType, StorageType >::GetHeapObject(), and InToPage().

+ Here is the call graph for this function:

◆ InToPage() [3/3]

bool v8::internal::Heap::InToPage ( Tagged< Object >  object)
inlinestatic

Definition at line 259 of file heap-inl.h.

259  {
260  DCHECK(!HasWeakHeapObjectTag(object));
261  return IsHeapObject(object) && InToPage(Cast<HeapObject>(object));
262 }

References v8::internal::DCHECK(), v8::internal::HasWeakHeapObjectTag(), and v8::internal::IsHeapObject().

Referenced by v8::internal::Scavenger::CheckAndScavengeObject(), InToPage(), v8::internal::anonymous_namespace{minor-mark-sweep.cc}::IsUnmarkedObjectInYoungGeneration(), v8::internal::Scavenger::PromoteObject(), v8::internal::Scavenger::ScavengeObject(), v8::internal::Scavenger::SemiSpaceCopyObject(), and v8::internal::anonymous_namespace{scavenger.cc}::GlobalHandlesWeakRootsUpdatingVisitor::UpdatePointer().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InvokeIncrementalMarkingEpilogueCallbacks()

void v8::internal::Heap::InvokeIncrementalMarkingEpilogueCallbacks ( )
private

Definition at line 4078 of file heap.cc.

4078  {
4079  AllowGarbageCollection allow_allocation;
4080  VMState<EXTERNAL> state(isolate_);
4082  GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_EPILOGUE);
4083 }
@ kGCTypeIncrementalMarking
Definition: v8-callbacks.h:158

References CallGCEpilogueCallbacks(), isolate_, v8::kGCTypeIncrementalMarking, and v8::kNoGCCallbackFlags.

Referenced by v8::internal::IncrementalMarking::StartMarkingMajor().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InvokeIncrementalMarkingPrologueCallbacks()

void v8::internal::Heap::InvokeIncrementalMarkingPrologueCallbacks ( )
private

Definition at line 4071 of file heap.cc.

4071  {
4072  AllowGarbageCollection allow_allocation;
4073  VMState<EXTERNAL> state(isolate_);
4075  GCTracer::Scope::MC_INCREMENTAL_EXTERNAL_PROLOGUE);
4076 }

References CallGCPrologueCallbacks(), isolate_, v8::kGCTypeIncrementalMarking, and v8::kNoGCCallbackFlags.

Referenced by v8::internal::IncrementalMarking::StartMarkingMajor().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InvokeNearHeapLimitCallback()

bool v8::internal::Heap::InvokeNearHeapLimitCallback ( )
private

Definition at line 4405 of file heap.cc.

4405  {
4406  if (!near_heap_limit_callbacks_.empty()) {
4407  AllowGarbageCollection allow_gc;
4408  TRACE_GC(tracer(), GCTracer::Scope::HEAP_EXTERNAL_NEAR_HEAP_LIMIT);
4409  VMState<EXTERNAL> callback_state(isolate());
4410  HandleScope scope(isolate());
4411  v8::NearHeapLimitCallback callback =
4412  near_heap_limit_callbacks_.back().first;
4413  void* data = near_heap_limit_callbacks_.back().second;
4414  size_t heap_limit = callback(data, max_old_generation_size(),
4416  if (heap_limit > max_old_generation_size()) {
4418  std::min(heap_limit, AllocatorLimitOnMaxOldGenerationSize()));
4419  return true;
4420  }
4421  }
4422  return false;
4423 }
size_t(*)(void *data, size_t current_heap_limit, size_t initial_heap_limit) NearHeapLimitCallback
This callback is invoked when the heap size is close to the heap limit and V8 is likely to abort with...
Definition: v8-callbacks.h:204

References AllocatorLimitOnMaxOldGenerationSize(), initial_max_old_generation_size_, isolate(), max_old_generation_size(), near_heap_limit_callbacks_, SetOldGenerationAndGlobalMaximumSize(), TRACE_GC, and tracer().

Referenced by CheckHeapLimitReached(), CheckIneffectiveMarkCompact(), and CollectAllAvailableGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ is_current_gc_forced()

bool v8::internal::Heap::is_current_gc_forced ( ) const
inline

Definition at line 1412 of file heap.h.

1412 { return is_current_gc_forced_; }

◆ is_finalization_registry_cleanup_task_posted()

bool v8::internal::Heap::is_finalization_registry_cleanup_task_posted ( )
inline

Definition at line 949 of file heap.h.

949  {
951  }
bool is_finalization_registry_cleanup_task_posted_
Definition: heap.h:2454

◆ IsAllocationObserverActive()

bool v8::internal::Heap::IsAllocationObserverActive ( ) const
inline

Definition at line 581 of file heap.h.

581  {
583  }
int pause_allocation_observers_depth_
Definition: heap.h:2445

◆ IsFreeSpaceValid()

bool v8::internal::Heap::IsFreeSpaceValid ( const FreeSpace object)
static

Definition at line 6424 of file heap.cc.

6424  {
6425  Heap* heap = HeapUtils::GetOwnerHeap(object);
6426  Tagged<Object> free_space_map =
6427  heap->isolate()->root(RootIndex::kFreeSpaceMap);
6428  CHECK(!heap->deserialization_complete() ||
6429  object->map_slot().contains_map_value(free_space_map.ptr()));
6430  CHECK_LE(offsetof(FreeSpace, next_) + kTaggedSize,
6431  object->size(kRelaxedLoad));
6432  return true;
6433 }
static Heap * GetOwnerHeap(Tagged< HeapObject > object)
#define CHECK_LE(lhs, rhs)

References CHECK, CHECK_LE, v8::internal::FullObjectSlot::contains_map_value(), v8::internal::HeapUtils::GetOwnerHeap(), v8::kRelaxedLoad, v8::internal::kTaggedSize, v8::internal::HeapObjectLayout::map_slot(), v8::internal::TaggedImpl< kRefType, StorageType >::ptr(), and v8::internal::FreeSpace::size().

Referenced by v8::internal::FreeSpace::IsValid().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsGCWithMainThreadStack()

bool v8::internal::Heap::IsGCWithMainThreadStack ( ) const

Definition at line 527 of file heap.cc.

527  {
528  return embedder_stack_state_ == StackState::kMayContainHeapPointers;
529 }
StackState embedder_stack_state_
Definition: heap.h:2374

References embedder_stack_state_.

Referenced by IsGCWithStack(), and IterateConservativeStackRoots().

+ Here is the caller graph for this function:

◆ IsGCWithStack()

bool v8::internal::Heap::IsGCWithStack ( ) const

Definition at line 531 of file heap.cc.

531  {
533 }
bool HasBackgroundStacks() const
Definition: stack.h:119
bool IsGCWithMainThreadStack() const
Definition: heap.cc:527

References heap::base::Stack::HasBackgroundStacks(), IsGCWithMainThreadStack(), and stack().

Referenced by CanShortcutStringsDuringGC(), v8::internal::ScavengerCollector::CollectGarbage(), IterateConservativeStackRoots(), v8::internal::MinorMarkSweepCollector::MarkRootsFromConservativeStack(), and v8::internal::MarkCompactCollector::StartCompaction().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsImmovable()

bool v8::internal::Heap::IsImmovable ( Tagged< HeapObject >  object)

Definition at line 3422 of file heap.cc.

3422  {
3423  MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
3424  return chunk->NeverEvacuate() || chunk->IsLargePage();
3425 }

References v8::internal::MemoryChunk::FromHeapObject(), v8::internal::MemoryChunk::IsLargePage(), and v8::internal::MemoryChunk::NeverEvacuate().

+ Here is the call graph for this function:

◆ IsIneffectiveMarkCompact()

bool v8::internal::Heap::IsIneffectiveMarkCompact ( size_t  old_generation_size,
double  mutator_utilization 
)
private

Definition at line 3848 of file heap.cc.

3849  {
3850  const double kHighHeapPercentage = 0.8;
3851  const double kLowMutatorUtilization = 0.4;
3852  return old_generation_size >=
3853  kHighHeapPercentage * max_old_generation_size() &&
3854  mutator_utilization < kLowMutatorUtilization;
3855 }

References max_old_generation_size().

Referenced by CheckIneffectiveMarkCompact().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsInGC()

bool v8::internal::Heap::IsInGC ( ) const
inline

Definition at line 569 of file heap.h.

569  {
570  // Load state only once and store it in local variable. Otherwise multiples
571  // loads could return different states on background threads.
572  HeapState state = gc_state();
573  return state != NOT_IN_GC && state != TEAR_DOWN;
574  }

Referenced by v8::internal::HeapAllocator::AllocateRaw(), and v8::internal::TickSample::GetStackSample().

+ Here is the caller graph for this function:

◆ IsInlineAllocationEnabled()

bool v8::internal::Heap::IsInlineAllocationEnabled ( ) const
inline

Definition at line 1685 of file heap.h.

1685 { return inline_allocation_enabled_; }

◆ IsLargeObject()

bool v8::internal::Heap::IsLargeObject ( Tagged< HeapObject >  object)
static

Definition at line 3427 of file heap.cc.

3427  {
3428  return MemoryChunk::FromHeapObject(object)->IsLargePage();
3429 }

References v8::internal::MemoryChunk::FromHeapObject(), and v8::internal::MemoryChunk::IsLargePage().

Referenced by CanMoveObjectStart(), LeftTrimFixedArray(), v8::internal::String::MakeExternal(), v8::internal::String::MakeExternalDuringGC(), v8::internal::String::MakeThin(), NotifyObjectSizeChange(), RightTrimArray(), v8::internal::Scavenger::ScavengeObject(), v8::internal::JSObject::UpdateAllocationSite(), and v8::internal::anonymous_namespace{scavenger.cc}::GlobalHandlesWeakRootsUpdatingVisitor::UpdatePointer().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsLastResortGC()

bool v8::internal::Heap::IsLastResortGC ( )
inline

Definition at line 1675 of file heap.h.

Referenced by v8::internal::anonymous_namespace{mark-compact.cc}::GetCodeFlushMode().

+ Here is the caller graph for this function:

◆ IsMainThread()

◆ IsMarkingFlagAddress()

uint8_t * v8::internal::Heap::IsMarkingFlagAddress ( )

Definition at line 7393 of file heap.cc.

7393  {
7394  return &isolate()->isolate_data()->is_marking_flag_;
7395 }
const IsolateData * isolate_data() const
Definition: isolate.h:1222

References v8::internal::IsolateData::is_marking_flag_, isolate(), and v8::internal::Isolate::isolate_data().

+ Here is the call graph for this function:

◆ IsMinorMarkingFlagAddress()

uint8_t * v8::internal::Heap::IsMinorMarkingFlagAddress ( )

Definition at line 7401 of file heap.cc.

7401  {
7403 }

References v8::internal::IsolateData::is_minor_marking_flag_, isolate(), and v8::internal::Isolate::isolate_data().

+ Here is the call graph for this function:

◆ IsNewSpaceAllowedToGrowAboveTargetCapacity()

bool v8::internal::Heap::IsNewSpaceAllowedToGrowAboveTargetCapacity ( ) const

Definition at line 5573 of file heap.cc.

5573  {
5574  return always_allocate() || gc_state() == TEAR_DOWN ||
5576 }

References always_allocate(), gc_state(), incremental_marking(), v8::internal::IncrementalMarking::IsMarking(), and TEAR_DOWN.

+ Here is the call graph for this function:

◆ isolate()

Isolate * v8::internal::Heap::isolate ( ) const
inline

Definition at line 61 of file heap-inl.h.

61 { return Isolate::FromHeap(this); }
static Isolate * FromHeap(const Heap *heap)
Definition: isolate.h:1217

References v8::internal::Isolate::FromHeap().

Referenced by ActivateMemoryReducerIfNeededOnMainThread(), v8::internal::V8HeapExplorer::AddEntry(), v8::internal::MarkCompactCollector::AddEvacuationCandidate(), AddGCEpilogueCallback(), AddGCPrologueCallback(), AddRetainedMaps(), v8::internal::Heap::ExternalStringTable::AddString(), Allocate(), AllocateExternalBackingStore(), AllocateMap(), AllocatePartialMap(), v8::internal::HeapAllocator::AllocateRaw(), v8::internal::HeapAllocator::AllocateRawWithRetryOrFailSlowPath(), v8::internal::Heap::AllocationTrackerForDebugging::AllocationEvent(), AttachCppHeap(), v8::internal::CollectionBarrier::AwaitCollectionBackground(), v8::internal::EvacuateVisitorBase::cage_base(), CallGCEpilogueCallbacks(), CallGCPrologueCallbacks(), v8::internal::MinorGCJob::CancelTaskIfScheduled(), CanMoveObjectStart(), CanReferenceHeapObject(), CanShortcutStringsDuringGC(), CheckMemoryPressure(), v8::internal::Heap::ExternalStringTable::CleanUpAll(), v8::internal::Heap::ExternalStringTable::CleanUpYoung(), v8::internal::HeapProfiler::ClearHeapObjectMap(), ClearKeptObjects(), v8::internal::MinorMarkSweepCollector::ClearNonLiveReferences(), v8::internal::anonymous_namespace{heap.cc}::ClearStubCaches(), v8::internal::LocalHandleScope::CloseMainThreadScope(), CollectAllAvailableGarbage(), CollectCodeStatistics(), v8::internal::MarkCompactCollector::CollectEvacuationCandidates(), v8::internal::MinorMarkSweepCollector::CollectGarbage(), CollectGarbage(), CollectGarbageFromAnyThread(), CollectGarbageShared(), v8::internal::V8HeapExplorer::CollectTemporaryGlobalObjectsTags(), CompactWeakArrayLists(), ComputeMutatorUtilization(), v8::internal::ConcurrentUnifiedHeapMarkingVisitor::ConcurrentUnifiedHeapMarkingVisitor(), v8::internal::ConservativePinningScope::ConservativePinningScope(), CreateEarlyReadOnlyMapsAndObjects(), CreateImportantReadOnlyObjects(), CreateInitialMutableObjects(), CreateInternalAccessorInfoObjects(), CreateLateReadOnlyJSReceiverMaps(), CreateMutableApiObjects(), 
CreateReadOnlyApiObjects(), CreateReadOnlyHeapObjects(), CreateReadOnlyObjects(), DeactivateMajorGCInProgressFlag(), v8::internal::ArrayBufferSweeper::DecrementExternalMemoryCounters(), DequeueDirtyJSFinalizationRegistry(), v8::internal::MinorMarkSweepCollector::DrainMarkingWorklist(), DumpJSONHeapStatistics(), EnqueueDirtyJSFinalizationRegistry(), v8::internal::ReadOnlySpace::EnsurePage(), v8::internal::LocalHeap::EnsurePersistentHandles(), v8::internal::MinorMarkSweepCollector::EvacuateExternalPointerReferences(), v8::internal::EvacuateVisitorBase::EvacuateVisitorBase(), v8::internal::EvacuationAllocator::EvacuationAllocator(), FatalProcessOutOfMemory(), FinalizeExternalString(), FinalizePartialMap(), FindAllNativeContexts(), FindAllRetainedMaps(), v8::internal::MarkCompactCollector::Finish(), v8::internal::Sweeper::FinishMajorJobs(), v8::internal::Sweeper::FinishMinorJobs(), FreeLinearAllocationAreas(), GarbageCollectionEpilogueInSafepoint(), GcSafeInstructionStreamContains(), GcSafeMapOfHeapObject(), GcSafeTryFindCodeForInnerPointer(), v8::internal::HeapSnapshotGenerator::GenerateSnapshot(), v8::internal::HeapSnapshotGenerator::GenerateSnapshotAfterGC(), GetForegroundTaskRunner(), v8::internal::V8HeapExplorer::GetLocationFunction(), v8::internal::LocalHandleScope::GetMainThreadHandle(), HasDirtyJSFinalizationRegistries(), v8::internal::HeapAllocator::heap_for_allocation(), IncrementalMarkingLimitReached(), v8::internal::ArrayBufferSweeper::IncrementExternalMemoryCounters(), v8::internal::WriteBarrier::IndirectPointerMarkingFromCode(), InitializeHashSeed(), v8::internal::anonymous_namespace{heap.cc}::InvokeExternalCallbacks(), InvokeNearHeapLimitCallback(), v8::internal::V8HeapExplorer::IsEssentialObject(), IsMainThread(), IsMarkingFlagAddress(), IsMinorMarkingFlagAddress(), v8::internal::ObjectStatsCollectorImpl::isolate(), v8::internal::ObjectStats::isolate(), v8::internal::IncrementalMarking::isolate(), v8::internal::MarkingBarrier::isolate(), 
v8::internal::IsolateSafepoint::isolate(), v8::internal::HeapProfiler::isolate(), v8::internal::MarkCompactCollector::IsUnmarkedSharedHeapObject(), v8::internal::V8HeapExplorer::IterateAndExtractReferences(), IterateBuiltins(), IterateConservativeStackRoots(), IterateRootsForPrecisePinning(), IterateRootsIncludingClients(), IterateSmiRoots(), IterateWeakRoots(), KeepDuringJob(), LeftTrimFixedArray(), v8::internal::LocalHeap::LocalHeap(), v8::internal::V8HeapExplorer::MakeGlobalObjectTagMap(), MakeLinearAllocationAreasIterable(), v8::internal::WriteBarrier::MarkingFromCode(), v8::internal::WriteBarrier::MarkingSlow(), v8::internal::WriteBarrier::MarkingSlowFromCppHeapWrappable(), v8::internal::MarkCompactCollector::MarkObjectsFromClientHeaps(), v8::internal::MarkingBarrier::MarkRange(), v8::internal::MarkCompactCollector::MarkRoots(), v8::internal::MinorMarkSweepCollector::MarkRoots(), v8::internal::MarkCompactCollector::MarkRootsFromConservativeStack(), v8::internal::MinorMarkSweepCollector::MarkRootsFromConservativeStack(), v8::internal::MinorMarkSweepCollector::MarkRootsFromTracedHandles(), MeasureMemory(), MemoryPressureNotification(), v8::internal::PretenuringHandler::MergeAllocationSitePretenuringFeedback(), v8::internal::ProfilingMigrationObserver::Move(), MustBeInSharedOldSpace(), NewSpaceAllocationLimitAddress(), NewSpaceAllocationTopAddress(), v8::internal::StressScavengeObserver::NextLimit(), NextStressMarkingLimit(), NotifyContextDisposed(), v8::internal::GCTracer::NotifyFullSweepingCompletedAndStopCycleIfFinished(), v8::internal::MemoryReducer::NotifyMarkCompact(), v8::internal::GCTracer::NotifyMarkingStart(), NotifyObjectLayoutChange(), NotifyOldGenerationExpansion(), v8::internal::MemoryReducer::NotifyTimer(), NumberOfNativeContexts(), v8::internal::IncrementalMarking::OldGenerationSizeOfObjects(), v8::internal::LocalHandleScope::OpenMainThreadScope(), PauseConcurrentThreadsInClients(), PerformGarbageCollection(), PerformHeapVerification(), 
v8::internal::MemoryBalancer::PostHeartbeatTask(), v8::internal::GCTracer::Print(), v8::internal::GCTracer::PrintNVP(), ProcessDirtyJSFinalizationRegistries(), v8::internal::PretenuringHandler::ProcessPretenuringFeedback(), v8::internal::GCTracer::RecordGCPhasesHistograms(), v8::internal::MarkCompactCollector::RecordSlot(), v8::internal::FieldStatsCollector::RecordStats(), v8::internal::ObjectStatsCollectorImpl::RecordVirtualFeedbackVectorDetails(), v8::internal::PagedSpaceAllocatorPolicy::RefillLab(), v8::internal::MemoryBalancer::RefreshLimit(), RegisterStrongRoots(), RemoveDirtyFinalizationRegistriesOnContext(), v8::internal::CodeLargeObjectSpace::RemovePage(), v8::internal::GCTracer::ReportFullCycleToRecorder(), v8::internal::GCTracer::ReportIncrementalMarkingStepToRecorder(), v8::internal::GCTracer::ReportIncrementalSweepingStepToRecorder(), ReportStatisticsAfterGC(), v8::internal::GCTracer::ReportYoungCycleToRecorder(), v8::internal::StressScavengeObserver::RequestedGCDone(), v8::internal::MinorMarkSweepCollector::RequestGC(), ResumeConcurrentThreadsInClients(), roots_table(), v8::internal::ArrayBufferSweeper::SweepingState::SweepingJob::Run(), v8::internal::Sweeper::MajorSweeperJob::Run(), v8::internal::Sweeper::MinorSweeperJob::Run(), v8::internal::ConcurrentMarking::JobTaskMajor::Run(), v8::internal::ConcurrentMarking::JobTaskMinor::Run(), v8::internal::ScavengerCollector::QuarantinedPageSweeper::JobTask::Run(), v8::internal::Sweeper::MajorSweeperJob::RunImpl(), v8::internal::Sweeper::MinorSweeperJob::RunImpl(), v8::internal::BackgroundCollectionInterruptTask::RunInternal(), v8::internal::FinalizationRegistryCleanupTask::RunInternal(), v8::internal::IncrementalMarkingJob::Task::RunInternal(), v8::internal::ConcurrentMarking::RunMajor(), v8::internal::ConcurrentMarking::RunMinor(), v8::internal::ConcurrentMarking::RunMinorImpl(), v8::internal::Scavenger::ScavengePage(), v8::internal::IncrementalMarkingJob::ScheduleTask(), 
SetFunctionsMarkedForManualOptimization(), SetIsMarkingFlag(), SetIsMinorMarkingFlag(), SetOldGenerationAndGlobalAllocationLimit(), SetSerializedGlobalProxySizes(), SetSerializedObjects(), v8::internal::HeapAllocator::Setup(), SetUp(), v8::internal::LocalHeap::SetUpSharedMarking(), SetUpSpaces(), v8::internal::WriteBarrier::SharedMarkingFromCode(), ShouldOptimizeForBattery(), ShouldOptimizeForMemoryUsage(), ShouldUseBackgroundThreads(), ShouldUseIncrementalMarking(), v8::internal::FinalizationRegistryCleanupTask::SlowAssertNoActiveJavaScript(), v8::internal::MarkCompactCollector::StartCompaction(), v8::internal::GCTracer::StartCycle(), v8::internal::HeapProfiler::StartHeapObjectsTracking(), StartIncrementalMarking(), StartIncrementalMarkingIfAllocationLimitIsReached(), v8::internal::MarkCompactCollector::StartMarking(), v8::internal::MinorMarkSweepCollector::StartSweepNewSpace(), v8::internal::MinorMarkSweepCollector::StartSweepNewSpaceWithStickyBits(), v8::internal::StressScavengeObserver::Step(), v8::internal::StressConcurrentAllocationObserver::Step(), v8::internal::GCTracer::StopCycle(), v8::internal::GCTracer::StopObservablePause(), v8::internal::IncrementalMarking::StopPointerTableBlackAllocation(), v8::internal::CollectionBarrier::StopTimeToCollectionTimer(), v8::internal::StressScavengeObserver::StressScavengeObserver(), v8::internal::HeapProfiler::TakeSnapshot(), TearDown(), v8::internal::MinorMarkSweepCollector::TraceFragmentation(), v8::internal::UnreachableObjectsFilter::MarkingVisitor::TransitiveClosure(), TryFindCodeForInnerPointerForPrinting(), v8::internal::HeapAllocator::TryResizeLargeObject(), v8::internal::MinorGCJob::TryScheduleTask(), Unmark(), UnregisterStrongRoots(), v8::internal::HeapObjectsMap::UpdateHeapObjectsMap(), v8::internal::EvacuateOldSpaceVisitor::Visit(), v8::internal::InternalizedStringTableCleaner::VisitRootPointers(), v8::internal::RootsReferencesExtractor::VisitRootPointers(), WeakenDescriptorArrays(), 
v8::internal::LocalHeap::~LocalHeap(), and v8::internal::SemiSpace::~SemiSpace().

+ Here is the call graph for this function:

◆ IsOldGenerationExpansionAllowed()

bool v8::internal::Heap::IsOldGenerationExpansionAllowed ( size_t  size,
const base::MutexGuard &  expansion_mutex_witness 
) const

Definition at line 421 of file heap.cc.

422  {
424 }

References max_old_generation_size(), OldGenerationCapacity(), and size().

+ Here is the call graph for this function:

◆ IsPendingAllocation() [1/2]

bool v8::internal::Heap::IsPendingAllocation ( Tagged< HeapObject >  object)
inline

Definition at line 348 of file heap-inl.h.

348  {
349  bool result = IsPendingAllocationInternal(object);
350  if (v8_flags.trace_pending_allocations && result) {
351  StdoutStream{} << "Pending allocation: " << std::hex << "0x" << object.ptr()
352  << "\n";
353  }
354  return result;
355 }
bool IsPendingAllocationInternal(Tagged< HeapObject > object)
Definition: heap-inl.h:297

References IsPendingAllocationInternal(), v8::base::internal::result, and v8::internal::v8_flags.

Referenced by IsPendingAllocation(), v8::internal::compiler::JSHeapBroker::ObjectMayBeUninitialized(), v8::internal::GlobalDictionary::TryFindPropertyCellForConcurrentLookupIterator(), and v8::internal::SwissNameDictionary::TryValueAt().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsPendingAllocation() [2/2]

bool v8::internal::Heap::IsPendingAllocation ( Tagged< Object >  object)
inline

Definition at line 357 of file heap-inl.h.

357  {
358  return IsHeapObject(object) && IsPendingAllocation(Cast<HeapObject>(object));
359 }
bool IsPendingAllocation(Tagged< HeapObject > object)
Definition: heap-inl.h:348

References v8::internal::IsHeapObject(), and IsPendingAllocation().

+ Here is the call graph for this function:

◆ IsPendingAllocationInternal()

bool v8::internal::Heap::IsPendingAllocationInternal ( Tagged< HeapObject >  object)
inlineprivate

Definition at line 297 of file heap-inl.h.

297  {
299 
300  MemoryChunk* chunk = MemoryChunk::FromHeapObject(object);
301  if (chunk->InReadOnlySpace()) return false;
302 
303  BaseSpace* base_space = chunk->Metadata()->owner();
304  Address addr = object.address();
305 
306  switch (base_space->identity()) {
307  case NEW_SPACE: {
309  }
310 
311  case OLD_SPACE: {
313  }
314 
315  case CODE_SPACE: {
317  }
318 
319  case TRUSTED_SPACE: {
321  }
322 
323  case LO_SPACE:
324  case CODE_LO_SPACE:
325  case TRUSTED_LO_SPACE:
326  case NEW_LO_SPACE: {
327  LargeObjectSpace* large_space =
328  static_cast<LargeObjectSpace*>(base_space);
329  base::MutexGuard guard(large_space->pending_allocation_mutex());
330  return addr == large_space->pending_object();
331  }
332 
333  case SHARED_SPACE:
334  case SHARED_LO_SPACE:
337  // TODO(v8:13267): Ensure that all shared space objects have a memory
338  // barrier after initialization.
339  return false;
340 
341  case RO_SPACE:
342  UNREACHABLE();
343  }
344 
345  UNREACHABLE();
346 }
MainAllocator * new_space_allocator()
MainAllocator * code_space_allocator()
MainAllocator * trusted_space_allocator()
MainAllocator * old_space_allocator()
friend class LargeObjectSpace
Definition: heap.h:2501
V8_EXPORT_PRIVATE bool IsPendingAllocation(Address object_address)

References allocator(), v8::internal::CODE_LO_SPACE, v8::internal::CODE_SPACE, v8::internal::HeapAllocator::code_space_allocator(), v8::internal::DCHECK(), deserialization_complete(), v8::internal::MemoryChunk::FromHeapObject(), v8::internal::BaseSpace::identity(), v8::internal::MemoryChunk::InReadOnlySpace(), v8::internal::MainAllocator::IsPendingAllocation(), v8::internal::LO_SPACE, v8::internal::MemoryChunk::Metadata(), v8::internal::NEW_LO_SPACE, v8::internal::NEW_SPACE, v8::internal::HeapAllocator::new_space_allocator(), v8::internal::OLD_SPACE, v8::internal::HeapAllocator::old_space_allocator(), v8::internal::MemoryChunkMetadata::owner(), v8::internal::LargeObjectSpace::pending_allocation_mutex(), v8::internal::LargeObjectSpace::pending_object(), v8::internal::RO_SPACE, v8::internal::SHARED_LO_SPACE, v8::internal::SHARED_SPACE, v8::internal::SHARED_TRUSTED_LO_SPACE, v8::internal::SHARED_TRUSTED_SPACE, v8::internal::TRUSTED_LO_SPACE, v8::internal::TRUSTED_SPACE, v8::internal::HeapAllocator::trusted_space_allocator(), and v8::internal::UNREACHABLE().

Referenced by IsPendingAllocation().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsRegularObjectAllocation()

static bool v8::internal::Heap::IsRegularObjectAllocation ( AllocationType  allocation)
inlinestaticprivate

Definition at line 1776 of file heap.h.

1776  {
1777  return AllocationType::kYoung == allocation ||
1778  AllocationType::kOld == allocation;
1779  }

◆ IsStressingScavenge()

bool v8::internal::Heap::IsStressingScavenge ( )
private

Definition at line 7385 of file heap.cc.

7385  {
7386  return v8_flags.stress_scavenge > 0 && new_space();
7387 }

References new_space(), and v8::internal::v8_flags.

Referenced by HandleGCRequest(), SetUpSpaces(), and TearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsTearingDown()

◆ IsValidAllocationSpace()

bool v8::internal::Heap::IsValidAllocationSpace ( AllocationSpace  space)
static

Definition at line 4622 of file heap.cc.

4622  {
4623  switch (space) {
4624  case NEW_SPACE:
4625  case OLD_SPACE:
4626  case CODE_SPACE:
4627  case SHARED_SPACE:
4628  case LO_SPACE:
4629  case NEW_LO_SPACE:
4630  case CODE_LO_SPACE:
4631  case SHARED_LO_SPACE:
4632  case TRUSTED_SPACE:
4633  case SHARED_TRUSTED_SPACE:
4634  case TRUSTED_LO_SPACE:
4636  case RO_SPACE:
4637  return true;
4638  default:
4639  return false;
4640  }
4641 }

References v8::internal::CODE_LO_SPACE, v8::internal::CODE_SPACE, v8::internal::LO_SPACE, v8::internal::NEW_LO_SPACE, v8::internal::NEW_SPACE, v8::internal::OLD_SPACE, v8::internal::RO_SPACE, v8::internal::SHARED_LO_SPACE, v8::internal::SHARED_SPACE, v8::internal::SHARED_TRUSTED_LO_SPACE, v8::internal::SHARED_TRUSTED_SPACE, space(), v8::internal::TRUSTED_LO_SPACE, and v8::internal::TRUSTED_SPACE.

Referenced by v8::Isolate::GetHeapSpaceStatistics().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsYoungGenerationCollector()

static bool v8::internal::Heap::IsYoungGenerationCollector ( GarbageCollector  collector)
inlinestatic

◆ IterateBuiltins()

void v8::internal::Heap::IterateBuiltins ( RootVisitor *  v)

Definition at line 4967 of file heap.cc.

4967  {
4968  Builtins* builtins = isolate()->builtins();
4969  for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLast;
4970  ++builtin) {
4971  const char* name = Builtins::name(builtin);
4972  v->VisitRootPointer(Root::kBuiltins, name, builtins->builtin_slot(builtin));
4973  }
4974 
4975  for (Builtin builtin = Builtins::kFirst; builtin <= Builtins::kLastTier0;
4976  ++builtin) {
4977  v->VisitRootPointer(Root::kBuiltins, Builtins::name(builtin),
4978  builtins->builtin_tier0_slot(builtin));
4979  }
4980 
4981  // The entry table doesn't need to be updated since all builtins are embedded.
4983 }
static constexpr Builtin kFirst
Definition: builtins.h:119
static constexpr bool AllBuiltinsAreIsolateIndependent()
Definition: builtins.h:346
static constexpr Builtin kLast
Definition: builtins.h:120
static constexpr Builtin kLastTier0
Definition: builtins.h:121
static V8_EXPORT_PRIVATE const char * name(Builtin builtin)
Definition: builtins.cc:226
Builtins * builtins()
Definition: isolate.h:1458

References v8::internal::Builtins::AllBuiltinsAreIsolateIndependent(), v8::internal::Builtins::builtin_slot(), v8::internal::Builtins::builtin_tier0_slot(), v8::internal::Isolate::builtins(), isolate(), v8::internal::Builtins::kFirst, v8::internal::Builtins::kLast, v8::internal::Builtins::kLastTier0, v8::internal::Builtins::name(), v8::internal::name, and v8::internal::RootVisitor::VisitRootPointer().

Referenced by IterateRoots().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateConservativeStackRoots() [1/2]

void v8::internal::Heap::IterateConservativeStackRoots ( ::heap::base::StackVisitor *  stack_visitor,
StackScanMode  stack_scan_mode 
)

Definition at line 5005 of file heap.cc.

5006  {
5007  DCHECK(IsGCWithStack());
5008  DCHECK_NE(stack_scan_mode, StackScanMode::kNone);
5009 
5010  if (stack_scan_mode == StackScanMode::kSelective) {
5014  stack_visitor, selective_stack_scan_start_address_.value());
5015  return;
5016  }
5017 
5018  DCHECK_EQ(stack_scan_mode, StackScanMode::kFull);
5019  if (IsGCWithMainThreadStack()) {
5020  stack().IteratePointersUntilMarker(stack_visitor);
5021  }
5022  stack().IterateBackgroundStacks(stack_visitor);
5023 }
void IteratePointersUntilMarker(StackVisitor *visitor) const
Definition: stack.cc:161
void IterateBackgroundStacks(StackVisitor *visitor) const
Definition: stack.cc:186
void IteratePointersFromAddressUntilMarker(StackVisitor *visitor, const void *address) const
Definition: stack.cc:165

References v8::internal::DCHECK(), DCHECK_EQ, DCHECK_NE, IsGCWithMainThreadStack(), IsGCWithStack(), heap::base::Stack::IterateBackgroundStacks(), heap::base::Stack::IteratePointersFromAddressUntilMarker(), heap::base::Stack::IteratePointersUntilMarker(), kFull, kNone, kSelective, selective_stack_scan_start_address_, and stack().

+ Here is the call graph for this function:

◆ IterateConservativeStackRoots() [2/2]

void v8::internal::Heap::IterateConservativeStackRoots ( RootVisitor *  root_visitor,
IterateRootsMode  roots_mode = IterateRootsMode::kMainIsolate 
)

Definition at line 4987 of file heap.cc.

4988  {
4989  const StackScanMode stack_scan_mode =
4992  IsGCWithStack());
4993  if ((stack_scan_mode == StackScanMode::kNone) || !IsGCWithStack()) return;
4994 
4995  // In case of a shared GC, we're interested in the main isolate for CSS.
4996  Isolate* main_isolate = roots_mode == IterateRootsMode::kClientIsolate
4998  : isolate();
4999 
5000  ConservativeStackVisitor stack_visitor(main_isolate, root_visitor);
5001 
5002  IterateConservativeStackRoots(&stack_visitor, stack_scan_mode);
5003 }
StackScanMode ConservativeStackScanningModeForMajorGC() const
Definition: heap.h:387
void IterateConservativeStackRoots(RootVisitor *root_visitor, IterateRootsMode roots_mode=IterateRootsMode::kMainIsolate)
Definition: heap.cc:4987

References ConservativeStackScanningModeForMajorGC(), DCHECK_IMPLIES, IsGCWithStack(), isolate(), kClientIsolate, kNone, kSelective, and v8::internal::Isolate::shared_space_isolate().

Referenced by v8::internal::ScavengerCollector::CollectGarbage(), IterateRoots(), v8::internal::MarkCompactCollector::MarkRootsFromConservativeStack(), and v8::internal::MinorMarkSweepCollector::MarkRootsFromConservativeStack().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateRoots()

void v8::internal::Heap::IterateRoots ( RootVisitor *  v,
base::EnumSet< SkipRoot >  options,
IterateRootsMode  roots_mode = IterateRootsMode::kMainIsolate 
)

Definition at line 4808 of file heap.cc.

4809  {
4810  v->VisitRootPointers(Root::kStrongRootList, nullptr,
4811  roots_table().strong_roots_begin(),
4812  roots_table().strong_roots_end());
4813  v->Synchronize(VisitorSynchronization::kStrongRootList);
4814 
4815  isolate_->bootstrapper()->Iterate(v);
4816  v->Synchronize(VisitorSynchronization::kBootstrapper);
4818  v->Synchronize(VisitorSynchronization::kRelocatable);
4819  isolate_->debug()->Iterate(v);
4820  v->Synchronize(VisitorSynchronization::kDebug);
4821 
4823  v->Synchronize(VisitorSynchronization::kCompilationCache);
4824 
4825  const bool skip_iterate_builtins =
4826  options.contains(SkipRoot::kOldGeneration) ||
4828  options.contains(SkipRoot::kReadOnlyBuiltins) &&
4829  // Prior to ReadOnlyPromotion, builtins may be on the mutable heap.
4831  if (!skip_iterate_builtins) {
4832  IterateBuiltins(v);
4833  v->Synchronize(VisitorSynchronization::kBuiltins);
4834  }
4835 
4836  // Iterate over pointers being held by inactive threads.
4838  v->Synchronize(VisitorSynchronization::kThreadManager);
4839 
4840  // Visitors in this block only run when not serializing. These include:
4841  //
4842  // - Thread-local and stack.
4843  // - Handles.
4844  // - Microtasks.
4845  // - The startup object cache.
4846  //
4847  // When creating real startup snapshot, these areas are expected to be empty.
4848  // It is also possible to create a snapshot of a *running* isolate for testing
4849  // purposes. In this case, these areas are likely not empty and will simply be
4850  // skipped.
4851  //
4852  // The general guideline for adding visitors to this section vs. adding them
4853  // above is that non-transient heap state is always visited, transient heap
4854  // state is visited only when not serializing.
4855  if (!options.contains(SkipRoot::kUnserializable)) {
4856  if (!options.contains(SkipRoot::kTracedHandles)) {
4857  // Young GCs always skip traced handles and visit them manually.
4858  DCHECK(!options.contains(SkipRoot::kOldGeneration));
4859 
4861  }
4862 
4863  if (!options.contains(SkipRoot::kGlobalHandles)) {
4864  // Young GCs always skip global handles and visit them manually.
4865  DCHECK(!options.contains(SkipRoot::kOldGeneration));
4866 
4867  if (options.contains(SkipRoot::kWeak)) {
4869  } else {
4871  }
4872  }
4873  v->Synchronize(VisitorSynchronization::kGlobalHandles);
4874 
4875  if (!options.contains(SkipRoot::kStack)) {
4876  ClearStaleLeftTrimmedPointerVisitor left_trim_visitor(this, v);
4877  IterateStackRoots(&left_trim_visitor);
4878  if (!options.contains(SkipRoot::kConservativeStack)) {
4879  IterateConservativeStackRoots(v, roots_mode);
4880  }
4881  v->Synchronize(VisitorSynchronization::kStackRoots);
4882  }
4883 
4884  // Iterate over main thread handles in handle scopes.
4885  if (!options.contains(SkipRoot::kMainThreadHandles)) {
4886  // Clear main thread handles with stale references to left-trimmed
4887  // objects. The GC would crash on such stale references.
4888  ClearStaleLeftTrimmedPointerVisitor left_trim_visitor(this, v);
4889  isolate_->handle_scope_implementer()->Iterate(&left_trim_visitor);
4890  }
4891  // Iterate local handles for all local heaps.
4892  safepoint_->Iterate(v);
4893  // Iterates all persistent handles.
4895  v->Synchronize(VisitorSynchronization::kHandleScope);
4896 
4897  if (options.contains(SkipRoot::kOldGeneration)) {
4899  } else {
4901  }
4902  v->Synchronize(VisitorSynchronization::kEternalHandles);
4903 
4904  // Iterate over pending Microtasks stored in MicrotaskQueues.
4905  MicrotaskQueue* default_microtask_queue =
4906  isolate_->default_microtask_queue();
4907  if (default_microtask_queue) {
4908  MicrotaskQueue* microtask_queue = default_microtask_queue;
4909  do {
4910  microtask_queue->IterateMicrotasks(v);
4911  microtask_queue = microtask_queue->next();
4912  } while (microtask_queue != default_microtask_queue);
4913  }
4914  v->Synchronize(VisitorSynchronization::kMicroTasks);
4915 
4916  // Iterate over other strong roots (currently only identity maps and
4917  // deoptimization entries).
4918  for (StrongRootsEntry* current = strong_roots_head_; current;
4919  current = current->next) {
4920  v->VisitRootPointers(Root::kStrongRoots, current->label, current->start,
4921  current->end);
4922  }
4923  v->Synchronize(VisitorSynchronization::kStrongRoots);
4924 
4925  // Iterate over the startup and shared heap object caches unless
4926  // serializing or deserializing.
4928  v->Synchronize(VisitorSynchronization::kStartupObjectCache);
4929 
4930  // Iterate over shared heap object cache when the isolate owns this data
4931  // structure. Isolates which own the shared heap object cache are:
4932  // * All isolates when not using --shared-string-table.
4933  // * Shared space/main isolate with --shared-string-table.
4934  //
4935  // Isolates which do not own the shared heap object cache should not iterate
4936  // it.
4937  if (isolate_->OwnsStringTables()) {
4939  v->Synchronize(VisitorSynchronization::kSharedHeapObjectCache);
4940  }
4941  }
4942 
4943  if (!options.contains(SkipRoot::kWeak)) {
4944  IterateWeakRoots(v, options);
4945  }
4946 }
void Iterate(RootVisitor *v)
static constexpr bool kCodeObjectsAreInROSpace
Definition: builtins.h:105
void Iterate(RootVisitor *v)
Definition: debug.cc:527
void IterateAllRoots(RootVisitor *visitor)
void IterateYoungRoots(RootVisitor *visitor)
void IterateAllRoots(RootVisitor *v)
void IterateStrongRoots(RootVisitor *v)
StrongRootsEntry * strong_roots_head_
Definition: heap.h:2377
void IterateStackRoots(RootVisitor *v)
Definition: heap.cc:4985
void IterateWeakRoots(RootVisitor *v, base::EnumSet< SkipRoot > options)
Definition: heap.cc:4683
void IterateBuiltins(RootVisitor *v)
Definition: heap.cc:4967
bool serializer_enabled() const
Definition: isolate.h:1575
bool OwnsStringTables() const
Definition: isolate.h:2361
TracedHandles * traced_handles()
Definition: isolate.h:1433
PersistentHandlesList * persistent_handles_list() const
Definition: isolate.h:1727
ThreadManager * thread_manager() const
Definition: isolate.h:1437
Debug * debug() const
Definition: isolate.h:1500
Bootstrapper * bootstrapper()
Definition: isolate.h:1193
void Iterate(RootVisitor *visitor, Isolate *isolate)
static void Iterate(Isolate *isolate, RootVisitor *v)
Definition: objects.cc:4146
static void IterateStartupObjectCache(Isolate *isolate, RootVisitor *visitor)
static void IterateSharedHeapObjectCache(Isolate *isolate, RootVisitor *visitor)
StrongRootsEntry * next
Definition: heap.h:172
void Iterate(RootVisitor *v)
Definition: v8threads.cc:302
void Iterate(RootVisitor *)

References v8::internal::Isolate::bootstrapper(), v8::internal::Isolate::compilation_cache(), v8::base::EnumSet< E, T >::contains(), v8::internal::DCHECK(), v8::internal::Isolate::debug(), v8::internal::Isolate::eternal_handles(), v8::internal::Isolate::global_handles(), v8::internal::Isolate::handle_scope_implementer(), isolate_, v8::internal::HandleScopeImplementer::Iterate(), v8::internal::Relocatable::Iterate(), v8::internal::TracedHandles::Iterate(), v8::internal::CompilationCache::Iterate(), v8::internal::Debug::Iterate(), v8::internal::ThreadManager::Iterate(), v8::internal::Bootstrapper::Iterate(), v8::internal::PersistentHandlesList::Iterate(), v8::internal::GlobalHandles::IterateAllRoots(), v8::internal::EternalHandles::IterateAllRoots(), IterateBuiltins(), IterateConservativeStackRoots(), v8::internal::MicrotaskQueue::IterateMicrotasks(), v8::internal::SerializerDeserializer::IterateSharedHeapObjectCache(), IterateStackRoots(), v8::internal::SerializerDeserializer::IterateStartupObjectCache(), v8::internal::GlobalHandles::IterateStrongRoots(), IterateWeakRoots(), v8::internal::EternalHandles::IterateYoungRoots(), v8::internal::Builtins::kCodeObjectsAreInROSpace, v8::internal::kConservativeStack, v8::internal::kGlobalHandles, v8::internal::kMainThreadHandles, v8::internal::kOldGeneration, v8::internal::kReadOnlyBuiltins, v8::internal::kStack, v8::internal::kTracedHandles, v8::internal::kUnserializable, v8::internal::kWeak, v8::internal::MicrotaskQueue::next(), v8::internal::StrongRootsEntry::next, v8::internal::Isolate::OwnsStringTables(), v8::internal::Isolate::persistent_handles_list(), roots_table(), safepoint_, v8::internal::Isolate::serializer_enabled(), strong_roots_head_, v8::internal::RootVisitor::Synchronize(), v8::internal::Isolate::thread_manager(), v8::internal::Isolate::traced_handles(), and v8::internal::RootVisitor::VisitRootPointers().

Referenced by v8::internal::ScavengerCollector::CollectGarbage(), v8::internal::StartupDeserializer::DeserializeIntoIsolate(), v8::internal::V8HeapExplorer::IterateAndExtractReferences(), IterateRootsIncludingClients(), LeftTrimFixedArray(), v8::internal::UnreachableObjectsFilter::MarkReachableObjects(), v8::internal::IncrementalMarking::MarkRoots(), v8::internal::MarkCompactCollector::MarkRoots(), v8::internal::MinorMarkSweepCollector::MarkRoots(), and v8::internal::StartupSerializer::SerializeStrongReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateRootsForPrecisePinning()

void v8::internal::Heap::IterateRootsForPrecisePinning ( RootVisitor *  visitor)

Definition at line 5025 of file heap.cc.

5025  {
5026  IterateStackRoots(visitor);
5027  isolate()->handle_scope_implementer()->Iterate(visitor);
5028 }

References v8::internal::Isolate::handle_scope_implementer(), isolate(), v8::internal::HandleScopeImplementer::Iterate(), and IterateStackRoots().

Referenced by v8::internal::ScavengerCollector::CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateRootsIncludingClients()

void v8::internal::Heap::IterateRootsIncludingClients ( RootVisitor *  v,
base::EnumSet< SkipRoot >  options 
)

Definition at line 4948 of file heap.cc.

4949  {
4951 
4952  if (isolate()->is_shared_space_isolate()) {
4953  ClientRootVisitor<> client_root_visitor(v);
4955  [v = &client_root_visitor, options](Isolate* client) {
4956  client->heap()->IterateRoots(v, options,
4958  });
4959  }
4960 }
void IterateRoots(RootVisitor *v, base::EnumSet< SkipRoot > options, IterateRootsMode roots_mode=IterateRootsMode::kMainIsolate)
Definition: heap.cc:4808

References v8::internal::Isolate::global_safepoint(), v8::internal::Isolate::heap(), isolate(), v8::internal::GlobalSafepoint::IterateClientIsolates(), IterateRoots(), kClientIsolate, and kMainIsolate.

+ Here is the call graph for this function:

◆ IterateSmiRoots()

void v8::internal::Heap::IterateSmiRoots ( RootVisitor *  v)

Definition at line 4722 of file heap.cc.

4722  {
4723  // Acquire execution access since we are going to read stack limit values.
4724  ExecutionAccess access(isolate());
4725  v->VisitRootPointers(Root::kSmiRootList, nullptr,
4726  roots_table().smi_roots_begin(),
4727  roots_table().smi_roots_end());
4728  v->Synchronize(VisitorSynchronization::kSmiRootList);
4729 }

References isolate(), roots_table(), v8::internal::RootVisitor::Synchronize(), and v8::internal::RootVisitor::VisitRootPointers().

Referenced by v8::internal::StartupDeserializer::DeserializeIntoIsolate(), and v8::internal::StartupSerializer::SerializeStrongReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateStackRoots()

void v8::internal::Heap::IterateStackRoots ( RootVisitor *  v)

Definition at line 4985 of file heap.cc.

4985 { isolate_->Iterate(v); }
void Iterate(RootVisitor *v)
Definition: isolate.cc:659

References isolate_, and v8::internal::Isolate::Iterate().

Referenced by IterateRoots(), and IterateRootsForPrecisePinning().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateWeakGlobalHandles()

void v8::internal::Heap::IterateWeakGlobalHandles ( RootVisitor *  v)

Definition at line 4962 of file heap.cc.

4962  {
4965 }
void IterateWeakRoots(RootVisitor *v)

References v8::internal::Isolate::global_handles(), isolate_, v8::internal::TracedHandles::Iterate(), v8::internal::GlobalHandles::IterateWeakRoots(), and v8::internal::Isolate::traced_handles().

Referenced by v8::internal::V8HeapExplorer::IterateAndExtractReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateWeakRoots()

void v8::internal::Heap::IterateWeakRoots ( RootVisitor *  v,
base::EnumSet< SkipRoot >  options 
)

Definition at line 4683 of file heap.cc.

4683  {
4684  DCHECK(!options.contains(SkipRoot::kWeak));
4685 
4686  if (!options.contains(SkipRoot::kUnserializable)) {
4687  // Isolate::topmost_script_having_context_address is treated weakly.
4688  v->VisitRootPointer(
4689  Root::kWeakRoots, nullptr,
4690  FullObjectSlot(isolate()->topmost_script_having_context_address()));
4691  }
4692 
4693  if (!options.contains(SkipRoot::kOldGeneration) &&
4694  !options.contains(SkipRoot::kUnserializable) &&
4695  isolate()->OwnsStringTables()) {
4696  // Do not visit for the following reasons.
4697  // - Serialization, since the string table is custom serialized.
4698  // - If we are skipping old generation, since all internalized strings
4699  // are in old space.
4700  // - If the string table is shared and this is not the shared heap,
4701  // since all internalized strings are in the shared heap.
4703  }
4704  v->Synchronize(VisitorSynchronization::kStringTable);
4705  if (!options.contains(SkipRoot::kExternalStringTable) &&
4706  !options.contains(SkipRoot::kUnserializable)) {
4707  // Scavenge collections have special processing for this.
4708  // Do not visit for serialization, since the external string table will
4709  // be populated from scratch upon deserialization.
4711  }
4712  v->Synchronize(VisitorSynchronization::kExternalStringsTable);
4713  if (!options.contains(SkipRoot::kOldGeneration) &&
4714  !options.contains(SkipRoot::kUnserializable) &&
4715  isolate()->is_shared_space_isolate() &&
4716  isolate()->shared_struct_type_registry()) {
4718  }
4719  v->Synchronize(VisitorSynchronization::kSharedStructTypeRegistry);
4720 }
void IterateAll(RootVisitor *v)
Definition: heap.cc:2981
SharedStructTypeRegistry * shared_struct_type_registry() const
Definition: isolate.h:806
void IterateElements(Isolate *isolate, RootVisitor *visitor)
Definition: js-struct.cc:496
void IterateElements(RootVisitor *visitor)

References v8::base::EnumSet< E, T >::contains(), v8::internal::DCHECK(), external_string_table_, isolate(), v8::internal::Heap::ExternalStringTable::IterateAll(), v8::internal::SharedStructTypeRegistry::IterateElements(), v8::internal::StringTable::IterateElements(), v8::internal::kExternalStringTable, v8::internal::kOldGeneration, v8::internal::kUnserializable, v8::internal::kWeak, v8::internal::Isolate::shared_struct_type_registry(), v8::internal::Isolate::string_table(), v8::internal::RootVisitor::Synchronize(), and v8::internal::RootVisitor::VisitRootPointer().

Referenced by v8::internal::StartupDeserializer::DeserializeIntoIsolate(), v8::internal::V8HeapExplorer::IterateAndExtractReferences(), IterateRoots(), and v8::internal::StartupSerializer::SerializeWeakReferencesAndDeferred().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ KeepDuringJob()

void v8::internal::Heap::KeepDuringJob ( DirectHandle< HeapObject >  target)

Definition at line 7127 of file heap.cc.

7127  {
7128  DCHECK(IsUndefined(weak_refs_keep_during_job()) ||
7129  IsOrderedHashSet(weak_refs_keep_during_job()));
7130  Handle<OrderedHashSet> table;
7131  if (IsUndefined(weak_refs_keep_during_job(), isolate())) {
7132  table = isolate()->factory()->NewOrderedHashSet();
7133  } else {
7134  table =
7135  handle(Cast<OrderedHashSet>(weak_refs_keep_during_job()), isolate());
7136  }
7137  MaybeHandle<OrderedHashSet> maybe_table =
7138  OrderedHashSet::Add(isolate(), table, target);
7139  if (!maybe_table.ToHandle(&table)) {
7141  "Fatal JavaScript error: Too many distinct WeakRef objects "
7142  "created or dereferenced during single event loop turn.");
7143  }
7144  set_weak_refs_keep_during_job(*table);
7145 }
Handle< OrderedHashSet > NewOrderedHashSet()
Definition: factory.cc:562
void Add(RWDigits Z, Digits X, Digits Y)

References v8::bigint::Add(), v8::internal::DCHECK(), v8::internal::Isolate::factory(), FatalProcessOutOfMemory(), v8::internal::handle(), isolate(), v8::internal::Factory::NewOrderedHashSet(), and v8::internal::MaybeHandle< T >::ToHandle().

+ Here is the call graph for this function:

◆ LeftTrimFixedArray()

Tagged< FixedArrayBase > v8::internal::Heap::LeftTrimFixedArray ( Tagged< FixedArrayBase >  obj,
int  elements_to_trim 
)

Definition at line 3508 of file heap.cc.

3509  {
3510  if (elements_to_trim == 0) {
3511  // This simplifies reasoning in the rest of the function.
3512  return object;
3513  }
3514  CHECK(!object.is_null());
3515  DCHECK(CanMoveObjectStart(object));
3516  // Add custom visitor to concurrent marker if new left-trimmable type
3517  // is added.
3518  DCHECK(IsFixedArray(object) || IsFixedDoubleArray(object));
3519  const int element_size = IsFixedArray(object) ? kTaggedSize : kDoubleSize;
3520  const int bytes_to_trim = elements_to_trim * element_size;
3521  Tagged<Map> map = object->map();
3522 
3523  // For now this trick is only applied to fixed arrays which may be in new
3524  // space or old space. In a large object space the object's start must
3525  // coincide with chunk and thus the trick is just not applicable.
3526  DCHECK(!IsLargeObject(object));
3527  DCHECK(object->map() != ReadOnlyRoots(this).fixed_cow_array_map());
3528 
3529  static_assert(offsetof(FixedArrayBase, map_) == 0);
3530  static_assert(offsetof(FixedArrayBase, length_) == kTaggedSize);
3531  static_assert(sizeof(FixedArrayBase) == 2 * kTaggedSize);
3532 
3533  const int len = object->length();
3534  DCHECK(elements_to_trim <= len);
3535 
3536  // Calculate location of new array start.
3537  Address old_start = object.address();
3538  Address new_start = old_start + bytes_to_trim;
3539 
3540  // Technically in new space this write might be omitted (except for
3541  // debug mode which iterates through the heap), but to play safer
3542  // we still do it.
3544  WritableFreeSpace::ForNonExecutableMemory(old_start, bytes_to_trim),
3549 
3550  // Initialize header of the trimmed array. Since left trimming is only
3551  // performed on pages which are not concurrently swept creating a filler
3552  // object does not require synchronization.
3553  RELAXED_WRITE_FIELD(object, bytes_to_trim,
3554  Tagged<Object>(MapWord::FromMap(map).ptr()));
3555  RELAXED_WRITE_FIELD(object, bytes_to_trim + kTaggedSize,
3556  Smi::FromInt(len - elements_to_trim));
3557 
3558  Tagged<FixedArrayBase> new_object =
3559  Cast<FixedArrayBase>(HeapObject::FromAddress(new_start));
3560 
3561  if (isolate()->log_object_relocation()) {
3562  // Notify the heap profiler of change in object layout.
3563  OnMoveEvent(object, new_object, new_object->Size());
3564  }
3565 
3566 #ifdef ENABLE_SLOW_DCHECKS
3567  if (v8_flags.enable_slow_asserts) {
3568  // Make sure the stack or other roots (e.g., Handles) don't contain pointers
3569  // to the original FixedArray (which is now the filler object).
3570  std::optional<IsolateSafepointScope> safepoint_scope;
3571 
3572  {
3573  AllowGarbageCollection allow_gc;
3574  safepoint_scope.emplace(this);
3575  }
3576 
3577  LeftTrimmerVerifierRootVisitor root_visitor(object);
3578  ReadOnlyRoots(this).Iterate(&root_visitor);
3579 
3580  // Stale references are allowed in some locations. IterateRoots() uses
3581  // ClearStaleLeftTrimmedPointerVisitor internally to clear such references
3582  // beforehand.
3583  IterateRoots(&root_visitor,
3584  base::EnumSet<SkipRoot>{SkipRoot::kConservativeStack});
3585  }
3586 #endif // ENABLE_SLOW_DCHECKS
3587 
3588  return new_object;
3589 }
void OnMoveEvent(Tagged< HeapObject > source, Tagged< HeapObject > target, int size_in_bytes)
Definition: heap.cc:3483
bool CanMoveObjectStart(Tagged< HeapObject > object)
Definition: heap.cc:3384
static MapWord FromMap(const Tagged< Map > map)
Definition: objects-inl.h:1253
bool MayContainRecordedSlots(Tagged< HeapObject > object)
Definition: heap.cc:3471
#define RELAXED_WRITE_FIELD(p, offset, value)

References CanMoveObjectStart(), CHECK, CreateFillerObjectAtRaw(), v8::internal::DCHECK(), v8::internal::WritableFreeSpace::ForNonExecutableMemory(), v8::internal::HeapObject::FromAddress(), v8::internal::Smi::FromInt(), v8::internal::MapWord::FromMap(), IsLargeObject(), isolate(), IterateRoots(), v8::internal::kClearFreedMemory, v8::internal::kConservativeStack, v8::internal::kDoubleSize, v8::internal::kNo, v8::internal::kTaggedSize, v8::internal::kYes, kYes, v8::internal::anonymous_namespace{heap.cc}::MayContainRecordedSlots(), OnMoveEvent(), ReadOnlyRoots, RELAXED_WRITE_FIELD, and v8::internal::v8_flags.

Referenced by v8::internal::anonymous_namespace{elements.cc}::FastElementsAccessor< Subclass, KindTraits >::RemoveElement().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ lo_space()

◆ main_thread_local_heap()

◆ major_sweeping_in_progress()

bool v8::internal::Heap::major_sweeping_in_progress ( ) const
inline

Definition at line 1598 of file heap.h.

1598  {
1599  return sweeper_->major_sweeping_in_progress();
1600  }

Referenced by EnsureSweepingCompleted(), FinishSweepingIfOutOfWork(), and PrintShortHeapStatistics().

+ Here is the caller graph for this function:

◆ MakeHeapIterable()

void v8::internal::Heap::MakeHeapIterable ( )

Definition at line 3661 of file heap.cc.

References EnsureSweepingCompleted(), kV8Only, and MakeLinearAllocationAreasIterable().

Referenced by CollectCodeStatistics(), and v8::internal::HeapObjectIterator::HeapObjectIterator().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MakeLinearAllocationAreasIterable()

void v8::internal::Heap::MakeLinearAllocationAreasIterable ( )
private

Definition at line 3667 of file heap.cc.

3667  {
3669 
3670  safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
3671  local_heap->MakeLinearAllocationAreasIterable();
3672  });
3673 
3674  if (isolate()->is_shared_space_isolate()) {
3676  client->heap()->MakeLinearAllocationAreasIterable();
3677  });
3678  }
3679 }

References allocator(), v8::internal::Isolate::global_safepoint(), v8::internal::Isolate::heap(), isolate(), v8::internal::GlobalSafepoint::IterateClientIsolates(), v8::internal::IsolateSafepoint::IterateLocalHeaps(), v8::internal::HeapAllocator::MakeLinearAllocationAreasIterable(), v8::internal::LocalHeap::MakeLinearAllocationAreasIterable(), and safepoint().

Referenced by MakeHeapIterable().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ mark_compact_collector()

◆ MarkCompact()

void v8::internal::Heap::MarkCompact ( )
private

Definition at line 2733 of file heap.cc.

2733  {
2735 
2736  PROFILE(isolate_, CodeMovingGCEvent());
2737 
2739  uint64_t size_of_objects_before_gc = SizeOfObjects();
2740 
2742 
2743  ms_count_++;
2744  contexts_disposed_ = 0;
2745 
2747 
2749 
2751 
2752  if (v8_flags.allocation_site_pretenuring) {
2753  EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
2754  }
2755  // This should be updated before PostGarbageCollectionProcessing, which
2756  // can cause another GC. Take into account the objects promoted during
2757  // GC.
2759  static_cast<size_t>(promoted_objects_size_);
2764  // Limits can now be computed based on estimate from MARK_COMPACT.
2765  set_using_initial_limit(false);
2766 }
void UpdateLowSinceMarkCompact(uint64_t amount)
Definition: heap.h:262
size_t old_generation_size_at_last_gc_
Definition: heap.h:2395
void MarkCompactPrologue()
Definition: heap.cc:2788
V8_EXPORT_PRIVATE void SetGCState(HeapState state)
Definition: heap.cc:523
int contexts_disposed_
Definition: heap.h:2204
void MarkCompactEpilogue()
Definition: heap.cc:2781
size_t old_generation_wasted_at_last_gc_
Definition: heap.h:2398
void UpdateOldGenerationAllocationCounter()
Definition: heap.h:1393
void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc)
Definition: heap.cc:3094
size_t old_generation_allocation_counter_at_last_gc_
Definition: heap.h:2392
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:856
#define PROFILE(the_isolate, Call)
Definition: code-events.h:59

References v8::internal::MarkCompactCollector::CollectGarbage(), contexts_disposed_, embedder_size_at_last_gc_, EmbedderSizeOfObjects(), EvaluateOldSpaceLocalPretenuring(), external_memory_, isolate_, MARK_COMPACT, mark_compact_collector(), MarkCompactEpilogue(), MarkCompactPrologue(), ms_count_, old_generation_allocation_counter_at_last_gc_, old_generation_size_at_last_gc_, old_generation_wasted_at_last_gc_, OldGenerationSizeOfObjects(), OldGenerationWastedBytes(), v8::internal::MarkCompactCollector::Prepare(), PROFILE, promoted_objects_size_, set_using_initial_limit(), SetGCState(), SizeOfObjects(), v8::internal::Heap::ExternalMemoryAccounting::total(), v8::internal::Heap::ExternalMemoryAccounting::UpdateLowSinceMarkCompact(), UpdateOldGenerationAllocationCounter(), and v8::internal::v8_flags.

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MarkCompactEpilogue()

void v8::internal::Heap::MarkCompactEpilogue ( )
private

Definition at line 2781 of file heap.cc.

2781  {
2782  TRACE_GC(tracer(), GCTracer::Scope::MC_EPILOGUE);
2784 
2785  isolate_->counters()->objs_since_last_full()->Set(0);
2786 }

References v8::internal::Isolate::counters(), isolate_, NOT_IN_GC, SetGCState(), TRACE_GC, and tracer().

Referenced by MarkCompact().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MarkCompactPrologue()

void v8::internal::Heap::MarkCompactPrologue ( )
private

Definition at line 2788 of file heap.cc.

2788  {
2789  TRACE_GC(tracer(), GCTracer::Scope::MC_PROLOGUE);
2791  RegExpResultsCache::Clear(string_split_cache());
2792  RegExpResultsCache::Clear(regexp_multiple_cache());
2794 
2795  // Flush the number to string caches.
2796  smi_string_cache()->Clear();
2797  double_string_cache()->Clear();
2798 }
static void Clear(Tagged< FixedArray > cache)
Definition: regexp.cc:1396

References v8::internal::DescriptorLookupCache::Clear(), v8::internal::RegExpResultsCache_MatchGlobalAtom::Clear(), v8::internal::RegExpResultsCache::Clear(), v8::internal::Isolate::descriptor_lookup_cache(), isolate_, TRACE_GC, and tracer().

Referenced by MarkCompact().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ marking_state()

MarkingState* v8::internal::Heap::marking_state ( )
inline

Definition at line 1677 of file heap.h.

1677 { return &marking_state_; }

Referenced by v8::internal::HeapAllocator::AllocateRaw(), v8::internal::OldLargeObjectSpace::AllocateRaw(), RightTrimArray(), and v8::internal::InternalizedStringTableCleaner::VisitRootPointers().

+ Here is the caller graph for this function:

◆ MarkSharedLinearAllocationAreasBlack()

void v8::internal::Heap::MarkSharedLinearAllocationAreasBlack ( )
private

Definition at line 3697 of file heap.cc.

3697  {
3698  DCHECK(!v8_flags.black_allocated_pages);
3701 
3702  safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
3703  local_heap->MarkSharedLinearAllocationAreasBlack();
3704  });
3705 }
void MarkSharedLinearAllocationAreasBlack()
Definition: local-heap.cc:433

References allocator(), v8::internal::DCHECK(), v8::internal::IsolateSafepoint::IterateLocalHeaps(), main_thread_local_heap(), v8::internal::HeapAllocator::MarkSharedLinearAllocationAreasBlack(), v8::internal::LocalHeap::MarkSharedLinearAllocationAreasBlack(), safepoint(), and v8::internal::v8_flags.

+ Here is the call graph for this function:

◆ max_old_generation_size()

size_t v8::internal::Heap::max_old_generation_size ( ) const
inlineprivate

Definition at line 1999 of file heap.h.

1999  {
2000  return max_old_generation_size_.load(std::memory_order_relaxed);
2001  }
std::atomic< size_t > max_old_generation_size_
Definition: heap.h:2152

Referenced by AllocationLimitOvershotByLargeMargin(), CanExpandOldGeneration(), CollectGarbage(), ConfigureHeap(), EnsureMinimumRemainingAllocationLimit(), Heap(), InvokeNearHeapLimitCallback(), IsIneffectiveMarkCompact(), IsOldGenerationExpansionAllowed(), MaxReserved(), v8::internal::MemoryBalancer::RefreshLimit(), SetOldGenerationAndGlobalMaximumSize(), and ShouldOptimizeForMemoryUsage().

+ Here is the caller graph for this function:

◆ MaximumCommittedMemory()

size_t v8::internal::Heap::MaximumCommittedMemory ( )
inline

Definition at line 1337 of file heap.h.

1337 { return maximum_committed_; }
size_t maximum_committed_
Definition: heap.h:2180

Referenced by GarbageCollectionEpilogue().

+ Here is the caller graph for this function:

◆ MaxOldGenerationSize()

size_t v8::internal::Heap::MaxOldGenerationSize ( )
inline

Definition at line 1294 of file heap.h.

1294 { return max_old_generation_size(); }

Referenced by v8::internal::Isolate::InitializeIsShortBuiltinCallsEnabled().

+ Here is the caller graph for this function:

◆ MaxOldGenerationSizeFromPhysicalMemory()

size_t v8::internal::Heap::MaxOldGenerationSizeFromPhysicalMemory ( uint64_t  physical_memory)
static

Definition at line 289 of file heap.cc.

289  {
290  size_t max_size = DefaulMaxHeapSize();
291  // Increase the heap size from 2GB to 4GB for 64-bit systems with physical
292  // memory at least 16GB. The threshold is set to 15GB to account for some
293  // memory being reserved by the hardware.
294 #ifdef V8_HOST_ARCH_64_BIT
295  if ((physical_memory / GB) >= 15) {
296 #if V8_OS_ANDROID
297  // As of 2024, Android devices with 16GiB are shipping (for instance the
298  // Pixel 9 Pro). However, a large fraction of their memory is not usable,
299  // and there is no disk swap, so heaps are still smaller than on desktop for
300  // now.
301  DCHECK_EQ(max_size / GB, v8_flags.high_end_android ? 2u : 1u);
302 #else
303  DCHECK_EQ(max_size / GB, 2u);
304 #endif
305  max_size *= 2;
306  }
307 #endif // V8_HOST_ARCH_64_BIT
308  return std::min(max_size, AllocatorLimitOnMaxOldGenerationSize());
309 }
static V8_EXPORT_PRIVATE size_t DefaulMaxHeapSize()
Definition: heap.cc:5089
constexpr int GB
Definition: v8-internal.h:57

References AllocatorLimitOnMaxOldGenerationSize(), DCHECK_EQ, DefaulMaxHeapSize(), v8::internal::GB, and v8::internal::v8_flags.

Referenced by HeapSizeFromPhysicalMemory().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MaxRegularHeapObjectSize()

int v8::internal::Heap::MaxRegularHeapObjectSize ( AllocationType  allocation)
inline

Definition at line 189 of file heap-inl.h.

References DCHECK_EQ, v8::internal::kCode, v8::internal::kMaxRegularHeapObjectSize, max_regular_code_object_size_, and v8::internal::MemoryChunkLayout::MaxRegularCodeObjectSize().

Referenced by v8::internal::HeapAllocator::AllocateRaw(), v8::internal::HeapAllocator::AllocateRawLargeInternal(), v8::internal::compiler::AllocationBuilder::CanAllocateArray(), and v8::internal::compiler::AllocationBuilder::CanAllocateSloppyArgumentElements().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MaxReserved()

size_t v8::internal::Heap::MaxReserved ( ) const

Definition at line 202 of file heap.cc.

202  {
203  const size_t kMaxNewLargeObjectSpaceSize = max_semi_space_size_;
204  return static_cast<size_t>(
205  (v8_flags.minor_ms ? 1 : 2) * max_semi_space_size_ +
206  kMaxNewLargeObjectSpaceSize + max_old_generation_size());
207 }

References max_old_generation_size(), max_semi_space_size_, and v8::internal::v8_flags.

Referenced by CanExpandOldGeneration(), and SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MaxSemiSpaceSize()

size_t v8::internal::Heap::MaxSemiSpaceSize ( )
inline

Definition at line 1292 of file heap.h.

1292 { return max_semi_space_size_; }

◆ MeasureMemory()

bool v8::internal::Heap::MeasureMemory ( std::unique_ptr< v8::MeasureMemoryDelegate delegate,
v8::MeasureMemoryExecution  execution 
)

Definition at line 4425 of file heap.cc.

4426  {
4427  HandleScope handle_scope(isolate());
4428  std::vector<Handle<NativeContext>> contexts = FindAllNativeContexts();
4429  std::vector<Handle<NativeContext>> to_measure;
4430  for (auto& current : contexts) {
4431  if (delegate->ShouldMeasure(v8::Utils::ToLocal(current))) {
4432  to_measure.push_back(current);
4433  }
4434  }
4435  return memory_measurement_->EnqueueRequest(std::move(delegate), execution,
4436  to_measure);
4437 }
std::unique_ptr< MemoryMeasurement > memory_measurement_
Definition: heap.h:2339
std::vector< Handle< NativeContext > > FindAllNativeContexts()
Definition: heap.cc:7203

References FindAllNativeContexts(), isolate(), and memory_measurement_.

Referenced by v8::Isolate::MeasureMemory().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ memory_allocator() [1/2]

MemoryAllocator* v8::internal::Heap::memory_allocator ( )
inline

Definition at line 846 of file heap.h.

846 { return memory_allocator_.get(); }

Referenced by v8::internal::SemiSpace::AllocateFreshPage(), v8::internal::LargeObjectSpace::AllocateLargePage(), v8::internal::ReadOnlySpace::AllocateNextPage(), v8::internal::ReadOnlySpace::AllocateNextPageAt(), Available(), CanExpandOldGeneration(), CommittedMemoryExecutable(), Contains(), ContainsCode(), v8::internal::Sweeper::EnsureMajorCompleted(), v8::internal::ReadOnlySpace::EnsureSpaceForAllocation(), v8::internal::MarkCompactCollector::Finish(), v8::internal::NewLargeObjectSpace::FreeDeadObjects(), GarbageCollectionPrologueInSafepoint(), InSpace(), InSpaceSlow(), v8::internal::GCTracer::Print(), v8::internal::GCTracer::PrintNVP(), PrintShortHeapStatistics(), RecordStats(), ReplaceReadOnlySpace(), v8::internal::SemiSpace::RewindPages(), v8::internal::ReadOnlySpace::Seal(), v8::internal::LargeObjectSpace::ShrinkPageToObjectSize(), v8::internal::ReadOnlyPageMetadata::ShrinkToHighWaterMark(), v8::internal::GCTracer::StartInSafepoint(), v8::internal::MinorMarkSweepCollector::StartSweepNewSpace(), v8::internal::GCTracer::StopInSafepoint(), v8::internal::MinorMarkSweepCollector::SweepNewLargeSpace(), TearDown(), v8::internal::LargeObjectSpace::TearDown(), v8::internal::HeapAllocator::TryResizeLargeObject(), v8::internal::Sweeper::ZeroOrDiscardUnusedMemory(), and v8::internal::SemiSpace::~SemiSpace().

+ Here is the caller graph for this function:

◆ memory_allocator() [2/2]

const MemoryAllocator* v8::internal::Heap::memory_allocator ( ) const
inline

Definition at line 847 of file heap.h.

847  {
848  return memory_allocator_.get();
849  }

◆ memory_measurement()

MemoryMeasurement* v8::internal::Heap::memory_measurement ( )
inlineprivate

Definition at line 2115 of file heap.h.

2115 { return memory_measurement_.get(); }

Referenced by v8::internal::MarkCompactCollector::CollectGarbage(), and v8::internal::MarkCompactCollector::StartMarking().

+ Here is the caller graph for this function:

◆ memory_reducer()

MemoryReducer* v8::internal::Heap::memory_reducer ( )
inlineprivate

Definition at line 1971 of file heap.h.

1971 { return memory_reducer_.get(); }

Referenced by CurrentHeapGrowingMode(), NotifyOldGenerationExpansion(), and StartIncrementalMarkingIfAllocationLimitIsReached().

+ Here is the caller graph for this function:

◆ MemoryPressureNotification()

void v8::internal::Heap::MemoryPressureNotification ( v8::MemoryPressureLevel  level,
bool  is_isolate_locked 
)

Definition at line 4330 of file heap.cc.

4331  {
4332  TRACE_EVENT1("devtools.timeline,v8", "V8.MemoryPressureNotification", "level",
4333  static_cast<int>(level));
4334  MemoryPressureLevel previous =
4335  memory_pressure_level_.exchange(level, std::memory_order_relaxed);
4336  if ((previous != MemoryPressureLevel::kCritical &&
4337  level == MemoryPressureLevel::kCritical) ||
4338  (previous == MemoryPressureLevel::kNone &&
4339  level == MemoryPressureLevel::kModerate)) {
4340  if (is_isolate_locked) {
4342  } else {
4343  ExecutionAccess access(isolate());
4344  isolate()->stack_guard()->RequestGC();
4345  task_runner_->PostTask(
4346  std::make_unique<MemoryPressureInterruptTask>(this));
4347  }
4348  }
4349 }
StackGuard * stack_guard()
Definition: isolate.h:1213
#define TRACE_EVENT1(category_group, name, arg1_name, arg1_val)

References CheckMemoryPressure(), isolate(), v8::kCritical, v8::kModerate, v8::kNone, memory_pressure_level_, v8::internal::Isolate::stack_guard(), task_runner_, and TRACE_EVENT1.

Referenced by v8::internal::wasm::WasmImportWrapperCache::LazyInitialize(), v8::Isolate::MemoryPressureNotification(), v8::internal::wasm::WasmCodeManager::NewNativeModule(), and v8::internal::BackingStore::TryAllocateAndPartiallyCommitMemory().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ min_old_generation_size()

size_t v8::internal::Heap::min_old_generation_size ( ) const
inlineprivate

Definition at line 2003 of file heap.h.

2003 { return min_old_generation_size_; }

Referenced by EnsureMinimumRemainingAllocationLimit(), and v8::internal::MemoryBalancer::RefreshLimit().

+ Here is the caller graph for this function:

◆ MinOldGenerationSize()

size_t v8::internal::Heap::MinOldGenerationSize ( )
static

Definition at line 272 of file heap.cc.

272  {
273  size_t paged_space_count =
275  return paged_space_count * PageMetadata::kPageSize;
276 }
@ FIRST_GROWABLE_PAGED_SPACE
Definition: globals.h:1320
@ LAST_GROWABLE_PAGED_SPACE
Definition: globals.h:1321

References v8::internal::FIRST_GROWABLE_PAGED_SPACE, v8::internal::MutablePageMetadata::kPageSize, and v8::internal::LAST_GROWABLE_PAGED_SPACE.

Referenced by v8::ResourceConstraints::ConfigureDefaultsFromHeapSize(), and ConfigureHeap().

+ Here is the caller graph for this function:

◆ minor_gc_job()

MinorGCJob* v8::internal::Heap::minor_gc_job ( )
inlineprivate

Definition at line 2054 of file heap.h.

2054 { return minor_gc_job_.get(); }
std::unique_ptr< MinorGCJob > minor_gc_job_
Definition: heap.h:2343

Referenced by GarbageCollectionPrologue(), and StartMinorMSConcurrentMarkingIfNeeded().

+ Here is the caller graph for this function:

◆ minor_mark_sweep_collector()

MinorMarkSweepCollector* v8::internal::Heap::minor_mark_sweep_collector ( )
inline

Definition at line 860 of file heap.h.

860  {
861  return minor_mark_sweep_collector_.get();
862  }
std::unique_ptr< MinorMarkSweepCollector > minor_mark_sweep_collector_
Definition: heap.h:2332

Referenced by v8::internal::ConcurrentMarking::GetMinorMaxConcurrency(), HandleGCRequest(), v8::internal::ConcurrentMarking::IsWorkLeft(), v8::internal::IncrementalMarking::MarkRoots(), v8::internal::ConcurrentMarking::RescheduleJobIfNeeded(), v8::internal::ConcurrentMarking::RunMinor(), v8::internal::ConcurrentMarking::RunMinorImpl(), and v8::internal::ConcurrentMarking::TryScheduleJob().

+ Here is the caller graph for this function:

◆ minor_sweeping_in_progress()

bool v8::internal::Heap::minor_sweeping_in_progress ( ) const
inline

Definition at line 1595 of file heap.h.

1595  {
1596  return sweeper_->minor_sweeping_in_progress();
1597  }

Referenced by EnsureSweepingCompleted(), EnsureYoungSweepingCompleted(), PrintShortHeapStatistics(), and v8::internal::IncrementalMarking::Start().

+ Here is the caller graph for this function:

◆ MinorMarkSweep()

void v8::internal::Heap::MinorMarkSweep ( )
private

Definition at line 2768 of file heap.cc.

2768  {
2769  DCHECK(v8_flags.minor_ms);
2771  DCHECK(use_new_space());
2772  DCHECK(!incremental_marking()->IsMajorMarking());
2773 
2774  TRACE_GC(tracer(), GCTracer::Scope::MINOR_MS);
2775 
2777  minor_mark_sweep_collector_->CollectGarbage();
2779 }

References CHECK_EQ, v8::internal::DCHECK(), gc_state(), incremental_marking(), MINOR_MARK_SWEEP, minor_mark_sweep_collector_, NOT_IN_GC, SetGCState(), TRACE_GC, tracer(), use_new_space(), and v8::internal::v8_flags.

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MinorMSSizeTaskTriggerReached()

bool v8::internal::Heap::MinorMSSizeTaskTriggerReached ( ) const
private

◆ MinYoungGenerationSize()

size_t v8::internal::Heap::MinYoungGenerationSize ( )
static

Definition at line 268 of file heap.cc.

References DefaultMinSemiSpaceSize(), and YoungGenerationSizeFromSemiSpaceSize().

Referenced by v8::ResourceConstraints::ConfigureDefaultsFromHeapSize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MonotonicallyIncreasingTimeInMs()

double v8::internal::Heap::MonotonicallyIncreasingTimeInMs ( ) const

Definition at line 4244 of file heap.cc.

4244  {
4246  static_cast<double>(base::Time::kMillisecondsPerSecond);
4247 }
virtual double MonotonicallyIncreasingTime()=0
Monotonically increasing time in seconds from an arbitrary fixed point in the past.
static constexpr int64_t kMillisecondsPerSecond
Definition: time.h:45

References v8::internal::V8::GetCurrentPlatform(), v8::base::TimeConstants::kMillisecondsPerSecond, and v8::Platform::MonotonicallyIncreasingTime().

Referenced by v8::internal::Heap::AllocationTrackerForDebugging::AllocationEvent(), CollectGarbageOnMemoryPressure(), GarbageCollectionEpilogue(), v8::internal::StackGuard::HandleInterrupts(), v8::internal::Isolate::Init(), v8::internal::Heap::AllocationTrackerForDebugging::MoveEvent(), NotifyLoadingStarted(), v8::internal::MemoryReducer::NotifyMarkCompact(), v8::internal::MemoryReducer::NotifyPossibleGarbage(), ShouldOptimizeForLoadTime(), and v8::internal::V8FileLogger::Time().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MoveRange() [1/2]

template<typename TSlot >
void v8::internal::Heap::MoveRange ( Tagged< HeapObject dst_object,
const TSlot  dst_slot,
const TSlot  src_slot,
int  len,
WriteBarrierMode  mode 
)

Definition at line 2117 of file heap.cc.

2118  {
2119  // Ensure no range overflow.
2120  DCHECK(dst_slot < TSlot(dst_slot + len));
2121  DCHECK(src_slot < src_slot + len);
2122 
2123  const auto atomic_callback = [](TSlot dst_slot, TSlot dst_end, TSlot src_slot,
2124  int len) {
2125  if (dst_slot < src_slot) {
2126  // Copy tagged values forward using relaxed load/stores that do not
2127  // involve value decompression.
2128  const AtomicSlot atomic_dst_end(dst_end);
2129  AtomicSlot dst(dst_slot);
2130  AtomicSlot src(src_slot);
2131  while (dst < atomic_dst_end) {
2132  *dst = *src;
2133  ++dst;
2134  ++src;
2135  }
2136  } else {
2137  // Copy tagged values backwards using relaxed load/stores that do not
2138  // involve value decompression.
2139  const AtomicSlot atomic_dst_begin(dst_slot);
2140  AtomicSlot dst(dst_slot + len - 1);
2141  AtomicSlot src(src_slot + len - 1);
2142  while (dst >= atomic_dst_begin) {
2143  *dst = *src;
2144  --dst;
2145  --src;
2146  }
2147  }
2148  };
2149  const auto non_atomic_callback = [](TSlot dst_slot, TSlot src_slot, int len) {
2150  MemMove(dst_slot.ToVoidPtr(), src_slot.ToVoidPtr(), len * kTaggedSize);
2151  };
2152  CopyOrMoveRangeImpl(this, dst_object, dst_slot, src_slot, len, mode,
2153  atomic_callback, non_atomic_callback);
2154 }
V8_EXPORT_PRIVATE void MemMove(void *dest, const void *src, size_t size)
Definition: memcopy.h:189

References v8::internal::anonymous_namespace{heap.cc}::CopyOrMoveRangeImpl(), v8::internal::DCHECK(), v8::internal::kTaggedSize, v8::internal::MemMove(), and mode().

+ Here is the call graph for this function:

◆ MoveRange() [2/2]

template<typename TSlot >
V8_EXPORT_PRIVATE void v8::internal::Heap::MoveRange ( Tagged< HeapObject dst_object,
TSlot  dst_slot,
TSlot  src_slot,
int  len,
WriteBarrierMode  mode 
)

Referenced by v8::internal::TaggedArrayBase< Derived, ShapeT, Super >::MoveElements(), and v8::internal::RUNTIME_FUNCTION().

+ Here is the caller graph for this function:

◆ ms_count()

int v8::internal::Heap::ms_count ( ) const
inline

Definition at line 543 of file heap.h.

543 { return ms_count_; }

◆ MustBeInSharedOldSpace()

bool v8::internal::Heap::MustBeInSharedOldSpace ( Tagged< HeapObject value)

Definition at line 4525 of file heap.cc.

4525  {
4526  if (isolate()->OwnsStringTables()) return false;
4527  if (ReadOnlyHeap::Contains(value)) return false;
4528  if (HeapLayout::InYoungGeneration(value)) return false;
4529  if (IsExternalString(value)) return false;
4530  if (IsInternalizedString(value)) return true;
4531  return false;
4532 }
constexpr bool IsInternalizedString(InstanceType instance_type)

References v8::internal::ReadOnlyHeap::Contains(), v8::internal::HeapLayout::InYoungGeneration(), v8::internal::InstanceTypeChecker::IsExternalString(), v8::internal::InstanceTypeChecker::IsInternalizedString(), isolate(), and v8::internal::value.

+ Here is the call graph for this function:

◆ native_contexts_list()

Tagged<Object> v8::internal::Heap::native_contexts_list ( ) const
inline

Definition at line 504 of file heap.h.

504  {
505  return Tagged<Object>(
506  native_contexts_list_.load(std::memory_order_acquire));
507  }
std::atomic< Address > native_contexts_list_
Definition: heap.h:2299

References v8::internal::Tagged< Object >.

Referenced by FindAllNativeContexts(), FindAllRetainedMaps(), NumberOfNativeContexts(), ProcessNativeContexts(), and ProcessWeakListRoots().

+ Here is the caller graph for this function:

◆ new_lo_space()

◆ new_space()

NewSpace* v8::internal::Heap::new_space ( ) const
inline

Definition at line 770 of file heap.h.

770 { return new_space_; }

Referenced by AllocateExternalBackingStore(), CanPromoteYoungAndExpandOldGeneration(), v8::internal::ScavengerCollector::CollectGarbage(), v8::internal::anonymous_namespace{heap.cc}::ComputeReducedNewSpaceSize(), CreateMutableHeapObjects(), DeactivateMajorGCInProgressFlag(), ExpandNewSpaceSize(), v8::internal::MarkCompactCollector::Finish(), v8::internal::MinorMarkSweepCollector::Finish(), GarbageCollectionEpilogueInSafepoint(), IsStressingScavenge(), v8::internal::MarkCompactCollector::MarkObjectsFromClientHeap(), v8::internal::EvacuateNewToOldSpacePageVisitor::Move(), NewSpaceAllocationCounter(), NewSpaceCapacity(), NewSpaceSize(), NewSpaceTargetCapacity(), NotifyContextDisposed(), NotifyDeserializationComplete(), v8::internal::ScavengerCollector::NumberOfScavengeTasks(), paged_new_space(), v8::internal::MarkCompactCollector::Prepare(), v8::internal::GCTracer::PrintNVP(), v8::internal::PretenuringHandler::ProcessPretenuringFeedback(), ReduceNewSpaceSize(), v8::internal::StressScavengeObserver::RequestedGCDone(), Scavenge(), SelectGarbageCollector(), semi_space_new_space(), v8::internal::HeapAllocator::Setup(), SetUpSpaces(), v8::internal::GCTracer::StartInSafepoint(), v8::internal::MinorMarkSweepCollector::StartMarking(), StartMinorMSConcurrentMarkingIfNeeded(), StartResizeNewSpace(), v8::internal::MinorMarkSweepCollector::StartSweepNewSpace(), v8::internal::StressScavengeObserver::Step(), v8::internal::MinorMarkSweepCollector::Sweep(), v8::internal::MarkCompactCollector::SweepArrayBufferExtensions(), v8::internal::ScavengerCollector::SweepArrayBufferExtensions(), v8::internal::Sweeper::SweepEmptyNewSpacePage(), Unmark(), YoungGenerationConsumedBytes(), YoungGenerationSizeOfObjects(), and YoungGenerationWastedBytes().

+ Here is the caller graph for this function:

◆ new_space_surviving_object_size()

size_t v8::internal::Heap::new_space_surviving_object_size ( )
inline

Definition at line 1367 of file heap.h.

1367  {
1369  }

Referenced by v8::internal::GCTracer::PrintNVP().

+ Here is the caller graph for this function:

◆ NewSpaceAllocationCounter()

size_t v8::internal::Heap::NewSpaceAllocationCounter ( ) const

Definition at line 999 of file heap.cc.

999  {
1000  size_t counter = new_space_allocation_counter_;
1001  if (new_space_) {
1002  DCHECK(!allocator()->new_space_allocator()->IsLabValid());
1003  counter += new_space()->AllocatedSinceLastGC();
1004  }
1005  return counter;
1006 }
virtual size_t AllocatedSinceLastGC() const =0

References v8::internal::NewSpace::AllocatedSinceLastGC(), allocator(), v8::internal::DCHECK(), new_space(), new_space_, and new_space_allocation_counter_.

Referenced by GarbageCollectionPrologueInSafepoint(), and v8::internal::GCTracer::StartInSafepoint().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NewSpaceAllocationLimitAddress()

Address * v8::internal::Heap::NewSpaceAllocationLimitAddress ( )
inline

Definition at line 166 of file heap-inl.h.

166  {
167  return new_space_ || v8_flags.sticky_mark_bits
169  : nullptr;
170 }
LinearAllocationArea new_allocation_info_
Definition: isolate-data.h:489

References isolate(), v8::internal::Isolate::isolate_data(), v8::internal::LinearAllocationArea::limit_address(), v8::internal::IsolateData::new_allocation_info_, new_space_, and v8::internal::v8_flags.

Referenced by v8::internal::WasmTrustedInstanceData::New().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NewSpaceAllocationTopAddress()

Address * v8::internal::Heap::NewSpaceAllocationTopAddress ( )
inline

Definition at line 160 of file heap-inl.h.

160  {
161  return new_space_ || v8_flags.sticky_mark_bits
163  : nullptr;
164 }

References isolate(), v8::internal::Isolate::isolate_data(), v8::internal::IsolateData::new_allocation_info_, new_space_, v8::internal::LinearAllocationArea::top_address(), and v8::internal::v8_flags.

Referenced by v8::internal::WasmTrustedInstanceData::New().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NewSpaceCapacity()

size_t v8::internal::Heap::NewSpaceCapacity ( ) const

Definition at line 4043 of file heap.cc.

4043  {
4044  if (v8_flags.sticky_mark_bits) {
4046  }
4047  return new_space() ? new_space()->Capacity() : 0;
4048 }

References v8::internal::PagedSpaceBase::Capacity(), v8::internal::NewSpace::Capacity(), new_space(), sticky_space(), v8::internal::v8_flags, and v8::internal::StickySpace::young_objects_size().

Referenced by Capacity(), RecordStats(), and SetUpSpaces().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NewSpaceLimit()

Address v8::internal::Heap::NewSpaceLimit ( )
inline

Definition at line 235 of file heap-inl.h.

235  {
236  return new_space_ || v8_flags.sticky_mark_bits
238  : kNullAddress;
239 }

References allocator(), v8::internal::kNullAddress, v8::internal::MainAllocator::limit(), new_space_, v8::internal::HeapAllocator::new_space_allocator(), and v8::internal::v8_flags.

+ Here is the call graph for this function:

◆ NewSpaceSize()

size_t v8::internal::Heap::NewSpaceSize ( )

Definition at line 4036 of file heap.cc.

4036  {
4037  if (v8_flags.sticky_mark_bits) {
4038  return sticky_space()->young_objects_size();
4039  }
4040  return new_space() ? new_space()->Size() : 0;
4041 }

References new_space(), sticky_space(), v8::internal::v8_flags, and v8::internal::StickySpace::young_objects_size().

Referenced by PerformGarbageCollection(), PrintShortHeapStatistics(), and RecordStats().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NewSpaceTargetCapacity()

size_t v8::internal::Heap::NewSpaceTargetCapacity ( ) const

Definition at line 4050 of file heap.cc.

4050  {
4051  if (v8_flags.sticky_mark_bits) {
4052  // TODO(333906585): Adjust target capacity for new sticky-space.
4054  }
4055  return new_space() ? new_space()->TotalCapacity() : 0;
4056 }

References v8::internal::PagedSpaceBase::Capacity(), new_space(), sticky_space(), v8::internal::NewSpace::TotalCapacity(), v8::internal::v8_flags, and v8::internal::StickySpace::young_objects_size().

Referenced by IncrementalMarkingLimitReached(), PerformGarbageCollection(), v8::internal::PretenuringHandler::ProcessPretenuringFeedback(), and StartIncrementalMarkingIfAllocationLimitIsReached().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NewSpaceTop()

Address v8::internal::Heap::NewSpaceTop ( )
inline

Definition at line 229 of file heap-inl.h.

229  {
230  return new_space_ || v8_flags.sticky_mark_bits
232  : kNullAddress;
233 }

References allocator(), v8::internal::kNullAddress, new_space_, v8::internal::HeapAllocator::new_space_allocator(), v8::internal::MainAllocator::top(), and v8::internal::v8_flags.

Referenced by v8::internal::MinorMarkSweepCollector::TraceFragmentation().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NextDebuggingId()

int v8::internal::Heap::NextDebuggingId ( )

Definition at line 7616 of file heap.cc.

7616  {
7617  int last_id = last_debugging_id().value();
7618  if (last_id == DebugInfo::DebuggingIdBits::kMax) {
7619  last_id = DebugInfo::kNoDebuggingId;
7620  }
7621  last_id++;
7622  set_last_debugging_id(Smi::FromInt(last_id));
7623  return last_id;
7624 }

References v8::internal::Smi::FromInt(), and v8::internal::DebugInfo::kNoDebuggingId.

Referenced by v8::internal::Debug::GetFunctionDebuggingId().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NextScriptId()

int v8::internal::Heap::NextScriptId ( )

Definition at line 7592 of file heap.cc.

7592  {
7593  FullObjectSlot last_script_id_slot(&roots_table()[RootIndex::kLastScriptId]);
7594  Tagged<Smi> last_id = Cast<Smi>(last_script_id_slot.Relaxed_Load());
7595  Tagged<Smi> new_id, last_id_before_cas;
7596  do {
7597  if (last_id.value() == Smi::kMaxValue) {
7598  static_assert(v8::UnboundScript::kNoScriptId == 0);
7599  new_id = Smi::FromInt(1);
7600  } else {
7601  new_id = Smi::FromInt(last_id.value() + 1);
7602  }
7603 
7604  // CAS returns the old value on success, and the current value in the slot
7605  // on failure. Therefore, we want to break if the returned value matches the
7606  // old value (last_id), and keep looping (with the new last_id value) if it
7607  // doesn't.
7608  last_id_before_cas = last_id;
7609  last_id =
7610  Cast<Smi>(last_script_id_slot.Relaxed_CompareAndSwap(last_id, new_id));
7611  } while (last_id != last_id_before_cas);
7612 
7613  return new_id.value();
7614 }

References v8::internal::Smi::FromInt(), v8::internal::Smi::kMaxValue, v8::UnboundScript::kNoScriptId, v8::internal::FullObjectSlot::Relaxed_CompareAndSwap(), v8::internal::FullObjectSlot::Relaxed_Load(), roots_table(), and v8::internal::Tagged< Smi >::value().

Referenced by v8::internal::Isolate::GetNextScriptId().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NextStackTraceId()

int v8::internal::Heap::NextStackTraceId ( )

Definition at line 7626 of file heap.cc.

7626  {
7627  int last_id = last_stack_trace_id().value();
7628  if (last_id == Smi::kMaxValue) {
7629  last_id = 0;
7630  }
7631  last_id++;
7632  set_last_stack_trace_id(Smi::FromInt(last_id));
7633  return last_id;
7634 }

References v8::internal::Smi::FromInt(), and v8::internal::Smi::kMaxValue.

+ Here is the call graph for this function:

◆ NextStressMarkingLimit()

int v8::internal::Heap::NextStressMarkingLimit ( )
private

Definition at line 6128 of file heap.cc.

6128  {
6129  return isolate()->fuzzer_rng()->NextInt(v8_flags.stress_marking + 1);
6130 }
int NextInt() V8_WARN_UNUSED_RESULT
base::RandomNumberGenerator * fuzzer_rng()
Definition: isolate.cc:6431

References v8::internal::Isolate::fuzzer_rng(), isolate(), v8::base::RandomNumberGenerator::NextInt(), and v8::internal::v8_flags.

Referenced by GarbageCollectionEpilogueInSafepoint(), and SetUpSpaces().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ non_atomic_marking_state()

NonAtomicMarkingState* v8::internal::Heap::non_atomic_marking_state ( )
inline

Definition at line 1679 of file heap.h.

1679  {
1680  return &non_atomic_marking_state_;
1681  }

Referenced by v8::internal::MarkCompactCollector::IsUnmarkedSharedHeapObject().

+ Here is the caller graph for this function:

◆ NotifyBootstrapComplete()

void v8::internal::Heap::NotifyBootstrapComplete ( )

Definition at line 6191 of file heap.cc.

6191  {
6192  // This function is invoked for each native context creation. We are
6193  // interested only in the first native context.
6196  }
6197 }
size_t old_generation_capacity_after_bootstrap_
Definition: heap.h:2181

References old_generation_capacity_after_bootstrap_, and OldGenerationCapacity().

Referenced by v8::internal::Bootstrapper::CreateEnvironment().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NotifyContextDisposed()

int v8::internal::Heap::NotifyContextDisposed ( bool  has_dependent_context)

Definition at line 1905 of file heap.cc.

1905  {
1906  if (!has_dependent_context) {
1912  }
1913  if (memory_reducer_) {
1914  memory_reducer_->NotifyPossibleGarbage();
1915  }
1916  } else if (v8_flags.idle_gc_on_context_disposal &&
1917  !v8_flags.single_generation) {
1920  }
1921  if (!isolate()->context().is_null()) {
1922  RemoveDirtyFinalizationRegistriesOnContext(isolate()->raw_native_context());
1923  isolate()->raw_native_context()->set_retained_maps(
1924  ReadOnlyRoots(this).empty_weak_array_list());
1925  }
1926 
1927  return ++contexts_disposed_;
1928 }
void EnsureMinimumRemainingAllocationLimit(size_t at_least_remaining)
Definition: heap.cc:3253
void RemoveDirtyFinalizationRegistriesOnContext(Tagged< NativeContext > context)
Definition: heap.cc:7097
static void TryPostJob(Heap *heap)
Definition: heap.cc:1825
Tagged< NativeContext > raw_native_context()
Definition: isolate-inl.h:53
#define DCHECK_NOT_NULL(val)
Definition: logging.h:491

References contexts_disposed_, DCHECK_NOT_NULL, EnsureMinimumRemainingAllocationLimit(), initial_old_generation_size_, initial_size_overwritten_, isolate(), memory_reducer_, new_space(), preconfigured_old_generation_size_, v8::internal::Isolate::raw_native_context(), ReadOnlyRoots, RemoveDirtyFinalizationRegistriesOnContext(), ResetOldGenerationAndGlobalAllocationLimit(), v8::internal::GCTracer::ResetSurvivalEvents(), tracer(), v8::internal::IdleTaskOnContextDispose::TryPostJob(), and v8::internal::v8_flags.

Referenced by v8::Isolate::ContextDisposedNotification().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NotifyDeserializationComplete()

void v8::internal::Heap::NotifyDeserializationComplete ( )

Definition at line 6159 of file heap.cc.

6159  {
6160  // There are no concurrent/background threads yet.
6162 
6164 
6165 #if DEBUG
6166  PagedSpaceIterator spaces(this);
6167  for (PagedSpace* s = spaces.Next(); s != nullptr; s = spaces.Next()) {
6168  // All pages right after bootstrapping must be marked as never-evacuate.
6169  for (PageMetadata* p : *s) {
6170  DCHECK(p->Chunk()->NeverEvacuate());
6171  }
6172  }
6173 #endif // DEBUG
6174 
6175  if (v8_flags.stress_concurrent_allocation) {
6182  }
6183 
6184  // Deserialization will never create objects in new space.
6185  DCHECK_IMPLIES(new_space(), new_space()->Size() == 0);
6186  DCHECK_IMPLIES(new_lo_space(), new_lo_space()->Size() == 0);
6187 
6189 }
friend class StressConcurrentAllocationObserver
Definition: heap.h:2530
bool need_to_remove_stress_concurrent_allocation_observer_
Definition: heap.h:2382
std::unique_ptr< AllocationObserver > stress_concurrent_allocation_observer_
Definition: heap.h:2344
void AddAllocationObserversToAllSpaces(AllocationObserver *observer, AllocationObserver *new_space_observer)
Definition: heap.cc:1027
V8_EXPORT_PRIVATE void AssertMainThreadIsOnlyThread()
Definition: safepoint.cc:304

References AddAllocationObserversToAllSpaces(), v8::internal::IsolateSafepoint::AssertMainThreadIsOnlyThread(), v8::internal::DCHECK(), DCHECK_IMPLIES, deserialization_complete_, FreeMainThreadLinearAllocationAreas(), need_to_remove_stress_concurrent_allocation_observer_, new_lo_space(), new_space(), v8::internal::PagedSpaceIterator::Next(), safepoint(), stress_concurrent_allocation_observer_, StressConcurrentAllocationObserver, and v8::internal::v8_flags.

Referenced by v8::internal::Isolate::Init().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NotifyLoadingEnded()

void v8::internal::Heap::NotifyLoadingEnded ( )
private

Definition at line 7582 of file heap.cc.

7582  {
7583  load_start_time_ms_.store(kLoadTimeNotLoading, std::memory_order_relaxed);
7585  if (auto* job = incremental_marking()->incremental_marking_job()) {
7586  // The task will start incremental marking (if needed and not already started)
7587  // and advance marking if incremental marking is active.
7588  job->ScheduleTask(TaskPriority::kUserVisible);
7589  }
7590 }
static constexpr double kLoadTimeNotLoading
Definition: heap.h:2467
void RecomputeLimitsAfterLoadingIfNeeded()
Definition: heap.cc:2659
std::atomic< double > load_start_time_ms_
Definition: heap.h:2470
@ kUserVisible
User visible tasks are long running background tasks that will improve performance and memory usage o...

References incremental_marking(), kLoadTimeNotLoading, v8::kUserVisible, load_start_time_ms_, and RecomputeLimitsAfterLoadingIfNeeded().

Referenced by v8::internal::Isolate::SetIsLoading().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NotifyLoadingStarted()

void v8::internal::Heap::NotifyLoadingStarted ( )
private

Definition at line 7575 of file heap.cc.

7575  {
7577  double now_ms = MonotonicallyIncreasingTimeInMs();
7578  DCHECK_NE(now_ms, kLoadTimeNotLoading);
7579  load_start_time_ms_.store(now_ms, std::memory_order_relaxed);
7580 }

References DCHECK_NE, kLoadTimeNotLoading, load_start_time_ms_, MonotonicallyIncreasingTimeInMs(), and update_allocation_limits_after_loading_.

Referenced by v8::internal::Isolate::SetIsLoading().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NotifyObjectLayoutChange()

void v8::internal::Heap::NotifyObjectLayoutChange ( Tagged< HeapObject object,
const DisallowGarbageCollection ,
InvalidateRecordedSlots  invalidate_recorded_slots,
InvalidateExternalPointerSlots  invalidate_external_pointer_slots,
int  new_size = 0 
)

Definition at line 4130 of file heap.cc.

4134  {
4135  if (invalidate_recorded_slots == InvalidateRecordedSlots::kYes) {
4136  const bool may_contain_recorded_slots = MayContainRecordedSlots(object);
4137  MutablePageMetadata* const chunk =
4139  // Do not remove the recorded slot in the map word as this one can never be
4140  // invalidated.
4141  const Address clear_range_start = object.address() + kTaggedSize;
4142  // Only slots in the range of the new object size (which is potentially
4143  // smaller than the original one) can be invalidated. Clearing of recorded
4144  // slots up to the original object size even conflicts with concurrent
4145  // sweeping.
4146  const Address clear_range_end = object.address() + new_size;
4147 
4148  if (incremental_marking()->IsMarking()) {
4149  ObjectLock::Lock(object);
4151  pending_layout_change_object_address = object.address();
4152  if (may_contain_recorded_slots && incremental_marking()->IsCompacting()) {
4154  chunk, clear_range_start, clear_range_end,
4155  SlotSet::EmptyBucketMode::KEEP_EMPTY_BUCKETS);
4156  }
4157  }
4158 
4159  if (may_contain_recorded_slots) {
4161  chunk, clear_range_start, clear_range_end,
4162  SlotSet::EmptyBucketMode::KEEP_EMPTY_BUCKETS);
4164  chunk, clear_range_start, clear_range_end,
4165  SlotSet::EmptyBucketMode::KEEP_EMPTY_BUCKETS);
4167  chunk, clear_range_start, clear_range_end,
4168  SlotSet::EmptyBucketMode::KEEP_EMPTY_BUCKETS);
4169  }
4170 
4171  DCHECK(!chunk->InTrustedSpace());
4172  }
4173 
4174  // During external pointer table compaction, the external pointer table
4175  // records addresses of fields that index into the external pointer table. As
4176  // such, it needs to be informed when such a field is invalidated.
4177  if (invalidate_external_pointer_slots ==
4179  // Currently, the only time this function receives
4180  // InvalidateExternalPointerSlots::kYes is when an external string
4181  // transitions to a thin string. If this ever changed to happen for array
4182  // buffer extension slots, we would have to run the invalidator in
4183  // pointer-compression-but-no-sandbox configurations as well.
4184  DCHECK(IsString(object));
4185 #ifdef V8_ENABLE_SANDBOX
4186  if (V8_ENABLE_SANDBOX_BOOL) {
4187  ExternalPointerSlotInvalidator slot_invalidator(isolate());
4188  int num_invalidated_slots = slot_invalidator.Visit(object);
4189  USE(num_invalidated_slots);
4190  DCHECK_GT(num_invalidated_slots, 0);
4191  }
4192 
4193  // During concurrent marking for a minor GC, the heap also builds up a
4194  // RememberedSet of external pointer field locations, and uses that set to
4195  // evacuate external pointer table entries when promoting objects. Here we
4196  // would need to invalidate that set too; until we do, assert that
4197  // NotifyObjectLayoutChange is never called on young objects.
4199 #endif
4200  }
4201 
4202 #ifdef VERIFY_HEAP
4203  if (v8_flags.verify_heap) {
4204  HeapVerifier::SetPendingLayoutChangeObject(this, object);
4205  }
4206 #endif
4207 }
static void Lock(Tagged< HeapObject > heap_object)
#define V8_ENABLE_SANDBOX_BOOL
Definition: globals.h:168
thread_local Address pending_layout_change_object_address
Definition: heap.cc:4086

References CHECK, v8::internal::DCHECK(), DCHECK_EQ, DCHECK_GT, v8::internal::MutablePageMetadata::FromHeapObject(), incremental_marking(), v8::internal::MemoryChunkMetadata::InTrustedSpace(), v8::internal::HeapLayout::InYoungGeneration(), isolate(), v8::internal::kNullAddress, v8::internal::kTaggedSize, v8::internal::kYes, v8::internal::ObjectLock::Lock(), v8::internal::anonymous_namespace{heap.cc}::MayContainRecordedSlots(), v8::internal::anonymous_namespace{heap.cc}::pending_layout_change_object_address, v8::internal::RememberedSet< type >::RemoveRange(), USE, V8_ENABLE_SANDBOX_BOOL, and v8::internal::v8_flags.

Referenced by v8::internal::TranslatedState::InitializeJSObjectAt(), v8::internal::TranslatedState::InitializeObjectWithTaggedFieldsAt(), and v8::internal::String::MakeExternal().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NotifyObjectLayoutChangeDone()

void v8::internal::Heap::NotifyObjectLayoutChangeDone ( Tagged< HeapObject object)
static

Definition at line 4210 of file heap.cc.

4210  {
4213  ObjectLock::Unlock(object);
4215  }
4216 }
static void Unlock(Tagged< HeapObject > heap_object)

References DCHECK_EQ, v8::internal::kNullAddress, v8::internal::anonymous_namespace{heap.cc}::pending_layout_change_object_address, and v8::internal::ObjectLock::Unlock().

Referenced by v8::internal::HeapObject::set_map().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NotifyObjectSizeChange()

void v8::internal::Heap::NotifyObjectSizeChange ( Tagged< HeapObject object,
int  old_size,
int  new_size,
ClearRecordedSlots  clear_recorded_slots 
)

Definition at line 4218 of file heap.cc.

4220  {
4221  old_size = ALIGN_TO_ALLOCATION_ALIGNMENT(old_size);
4222  new_size = ALIGN_TO_ALLOCATION_ALIGNMENT(new_size);
4223  DCHECK_LE(new_size, old_size);
4224  DCHECK(!IsLargeObject(object));
4225  if (new_size == old_size) return;
4226 
4227  const bool is_main_thread = LocalHeap::Current() == nullptr;
4228 
4229  DCHECK_IMPLIES(!is_main_thread,
4230  clear_recorded_slots == ClearRecordedSlots::kNo);
4231 
4232  const auto verify_no_slots_recorded =
4234 
4235  const auto clear_memory_mode = ClearFreedMemoryMode::kDontClearFreedMemory;
4236 
4237  const Address filler = object.address() + new_size;
4238  const int filler_size = old_size - new_size;
4240  WritableFreeSpace::ForNonExecutableMemory(filler, filler_size),
4241  clear_memory_mode, clear_recorded_slots, verify_no_slots_recorded);
4242 }

References ALIGN_TO_ALLOCATION_ALIGNMENT, CreateFillerObjectAtRaw(), v8::internal::DCHECK(), DCHECK_IMPLIES, DCHECK_LE, v8::internal::WritableFreeSpace::ForNonExecutableMemory(), IsLargeObject(), v8::internal::kDontClearFreedMemory, v8::internal::kNo, kNo, and kYes.

Referenced by v8::internal::String::MakeExternal(), v8::internal::String::MakeExternalDuringGC(), v8::internal::LocalHeap::NotifyObjectSizeChange(), RightTrimArray(), and v8::internal::anonymous_namespace{bigint.cc}::RightTrimString().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NotifyOldGenerationExpansion()

void v8::internal::Heap::NotifyOldGenerationExpansion ( LocalHeap local_heap,
AllocationSpace  space,
MutablePageMetadata chunk,
OldGenerationExpansionNotificationOrigin  notification_origin = OldGenerationExpansionNotificationOrigin::kFromSameHeap 
)

Definition at line 6199 of file heap.cc.

6202  {
6203  // Pages created during bootstrapping may contain immortal immovable objects.
6204  if (!deserialization_complete()) {
6205  DCHECK_NE(NEW_SPACE, chunk_metadata->owner()->identity());
6206  chunk_metadata->Chunk()->MarkNeverEvacuate();
6207  }
6208  if (IsAnyCodeSpace(space)) {
6209  isolate()->AddCodeMemoryChunk(chunk_metadata);
6210  }
6211 
6212  // Don't notify MemoryReducer when calling from client heap as otherwise not
6213  // thread safe.
6214  const size_t kMemoryReducerActivationThreshold = 1 * MB;
6215  if (local_heap->is_main_thread_for(this) && memory_reducer() != nullptr &&
6218  kMemoryReducerActivationThreshold &&
6219  (notification_origin ==
6221  v8_flags.memory_reducer_for_small_heaps) {
6223  }
6224 }
void AddCodeMemoryChunk(MutablePageMetadata *chunk)
Definition: isolate.cc:7531

References v8::internal::Isolate::AddCodeMemoryChunk(), v8::internal::MemoryChunkMetadata::Chunk(), DCHECK_NE, deserialization_complete(), v8::internal::BaseSpace::identity(), v8::internal::LocalHeap::is_main_thread_for(), v8::internal::IsAnyCodeSpace(), isolate(), kFromSameHeap, v8::internal::MemoryChunk::MarkNeverEvacuate(), v8::internal::MB, memory_reducer(), ms_count_, v8::internal::NEW_SPACE, v8::internal::MemoryReducer::NotifyPossibleGarbage(), old_generation_capacity_after_bootstrap_, OldGenerationCapacity(), v8::internal::MutablePageMetadata::owner(), space(), and v8::internal::v8_flags.

Referenced by v8::internal::OldLargeObjectSpace::AllocateRaw().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NumberOfDetachedContexts()

size_t v8::internal::Heap::NumberOfDetachedContexts ( )

Definition at line 7225 of file heap.cc.

7225  {
7226  // The detached_contexts() array has two entries per detached context.
7227  return detached_contexts()->length() / 2;
7228 }

◆ NumberOfNativeContexts()

size_t v8::internal::Heap::NumberOfNativeContexts ( )

Definition at line 7192 of file heap.cc.

7192  {
7193  int result = 0;
7195  while (!IsUndefined(context, isolate())) {
7196  ++result;
7197  Tagged<Context> native_context = Cast<Context>(context);
7198  context = native_context->next_context_link();
7199  }
7200  return result;
7201 }

References isolate(), v8::internal::native_context, native_contexts_list(), and v8::base::internal::result.

+ Here is the call graph for this function:

◆ NumberOfScavengeTasks()

int v8::internal::Heap::NumberOfScavengeTasks ( )
private

◆ NumberOfTrackedHeapObjectTypes()

size_t v8::internal::Heap::NumberOfTrackedHeapObjectTypes ( )

Definition at line 7151 of file heap.cc.

7151  {
7153 }

References v8::internal::ObjectStats::OBJECT_STATS_COUNT.

◆ ObjectCountAtLastGC()

size_t v8::internal::Heap::ObjectCountAtLastGC ( size_t  index)

Definition at line 7155 of file heap.cc.

7155  {
7157  return 0;
7158  return live_object_stats_->object_count_last_gc(index);
7159 }

References v8::internal::index, live_object_stats_, and v8::internal::ObjectStats::OBJECT_STATS_COUNT.

◆ ObjectSizeAtLastGC()

size_t v8::internal::Heap::ObjectSizeAtLastGC ( size_t  index)

Definition at line 7161 of file heap.cc.

7161  {
7163  return 0;
7164  return live_object_stats_->object_size_last_gc(index);
7165 }

References v8::internal::index, live_object_stats_, and v8::internal::ObjectStats::OBJECT_STATS_COUNT.

◆ old_generation_allocation_limit()

size_t v8::internal::Heap::old_generation_allocation_limit ( ) const
inlineprivate

Definition at line 1983 of file heap.h.

1983  {
1984  return old_generation_allocation_limit_.load(std::memory_order_relaxed);
1985  }
std::atomic< size_t > old_generation_allocation_limit_
Definition: heap.h:2292

Referenced by AllocationLimitOvershotByLargeMargin(), EnsureMinimumRemainingAllocationLimit(), PercentToOldGenerationLimit(), v8::internal::GCTracer::PrintNVP(), RecomputeLimits(), RecomputeLimitsAfterLoadingIfNeeded(), ShrinkOldGenerationAllocationLimitIfNotConfigured(), v8::internal::IncrementalMarking::Start(), and v8::internal::IncrementalMarking::Stop().

+ Here is the caller graph for this function:

◆ old_space()

◆ OldArrayBufferBytes()

size_t v8::internal::Heap::OldArrayBufferBytes ( )

Definition at line 6972 of file heap.cc.

6972  {
6973  return array_buffer_sweeper()->OldBytes();
6974 }
ArrayBufferSweeper * array_buffer_sweeper()
Definition: heap.h:866

References array_buffer_sweeper(), and v8::internal::ArrayBufferSweeper::OldBytes().

+ Here is the call graph for this function:

◆ OldGenerationAllocationCounter()

size_t v8::internal::Heap::OldGenerationAllocationCounter ( )
inline

Definition at line 1398 of file heap.h.

1398  {
1401  }
size_t PromotedSinceLastGC()
Definition: heap.h:2126

Referenced by v8::internal::GCTracer::StartInSafepoint().

+ Here is the caller graph for this function:

◆ OldGenerationCapacity()

size_t v8::internal::Heap::OldGenerationCapacity ( ) const

Definition at line 333 of file heap.cc.

333  {
334  if (!HasBeenSetUp()) return 0;
335  PagedSpaceIterator spaces(this);
336  size_t total = 0;
337  for (PagedSpace* space = spaces.Next(); space != nullptr;
338  space = spaces.Next()) {
339  total += space->Capacity();
340  }
341  if (shared_lo_space_) {
342  total += shared_lo_space_->SizeOfObjects();
343  }
344  return total + lo_space_->SizeOfObjects() + code_lo_space_->SizeOfObjects() +
346 }
size_t SizeOfObjects() const override
Definition: large-spaces.h:47

References code_lo_space_, HasBeenSetUp(), lo_space_, v8::internal::PagedSpaceIterator::Next(), shared_lo_space_, v8::internal::LargeObjectSpace::SizeOfObjects(), space(), and trusted_lo_space_.

Referenced by CanExpandOldGeneration(), Capacity(), IsOldGenerationExpansionAllowed(), NotifyBootstrapComplete(), and NotifyOldGenerationExpansion().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldGenerationConsumedBytes()

size_t v8::internal::Heap::OldGenerationConsumedBytes ( ) const

Definition at line 5381 of file heap.cc.

5381  {
5383 }

References OldGenerationSizeOfObjects(), and OldGenerationWastedBytes().

Referenced by AllocationLimitOvershotByLargeMargin(), EnsureMinimumRemainingAllocationLimit(), PercentToOldGenerationLimit(), RecomputeLimits(), v8::internal::GCTracer::RecordGCSizeCounters(), and ShrinkOldGenerationAllocationLimitIfNotConfigured().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldGenerationConsumedBytesAtLastGC()

size_t v8::internal::Heap::OldGenerationConsumedBytesAtLastGC ( ) const

Definition at line 5435 of file heap.cc.

References old_generation_size_at_last_gc_, and old_generation_wasted_at_last_gc_.

Referenced by GlobalConsumedBytesAtLastGC(), and PercentToOldGenerationLimit().

+ Here is the caller graph for this function:

◆ OldGenerationLowMemory()

size_t v8::internal::Heap::OldGenerationLowMemory ( )
static

Definition at line 5052 of file heap.cc.

5052  {
5053  return 128 * MB * HeapLimitMultiplier();
5054 }

References HeapLimitMultiplier(), and v8::internal::MB.

Referenced by YoungGenerationSizeFromOldGenerationSize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldGenerationSizeOfObjects()

size_t v8::internal::Heap::OldGenerationSizeOfObjects ( ) const

Definition at line 5351 of file heap.cc.

5351  {
5352  size_t total = 0;
5353  if (v8_flags.sticky_mark_bits)
5354  total += sticky_space()->old_objects_size();
5355  else
5356  total += old_space()->SizeOfObjects();
5357  total += lo_space()->SizeOfObjects();
5358  total += code_space()->SizeOfObjects();
5359  total += code_lo_space()->SizeOfObjects();
5360  if (shared_space()) {
5361  total += shared_space()->SizeOfObjects();
5362  }
5363  if (shared_lo_space()) {
5364  total += shared_lo_space()->SizeOfObjects();
5365  }
5366  total += trusted_space()->SizeOfObjects();
5367  total += trusted_lo_space()->SizeOfObjects();
5368  return total;
5369 }
size_t old_objects_size() const
Definition: paged-spaces.h:482

References code_lo_space(), code_space(), lo_space(), v8::internal::StickySpace::old_objects_size(), old_space(), shared_lo_space(), shared_space(), v8::internal::LargeObjectSpace::SizeOfObjects(), sticky_space(), trusted_lo_space(), trusted_space(), and v8::internal::v8_flags.

Referenced by CollectGarbage(), GlobalSizeOfObjects(), HasHighFragmentation(), v8::internal::MemoryBalancer::HeartbeatUpdate(), v8::internal::IncrementalMarking::IsBelowActivationThresholds(), MarkCompact(), OldGenerationConsumedBytes(), v8::internal::MemoryBalancer::RecomputeLimits(), RecomputeLimitsAfterLoadingIfNeeded(), v8::internal::IncrementalMarking::Start(), and v8::internal::IncrementalMarking::Stop().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldGenerationSpaceAvailable()

size_t v8::internal::Heap::OldGenerationSpaceAvailable ( )
inlineprivate

Definition at line 1941 of file heap.h.

1941  {
1942  uint64_t bytes = OldGenerationConsumedBytes();
1943  if (!v8_flags.external_memory_accounted_in_global_limit) {
1944  // TODO(chromium:42203776): When not accounting external memory properly
1945  // in the global limit, just add allocated external bytes towards the
1946  // regular old gen bytes. This is historic behavior.
1948  }
1949 
1950  if (old_generation_allocation_limit() <= bytes) return 0;
1951  return old_generation_allocation_limit() - static_cast<size_t>(bytes);
1952  }

References v8::internal::v8_flags.

Referenced by IncrementalMarkingLimitReached(), RecomputeLimitsAfterLoadingIfNeeded(), ShouldExpandOldGenerationOnSlowAllocation(), and StartIncrementalMarkingIfAllocationLimitIsReached().

+ Here is the caller graph for this function:

◆ OldGenerationToSemiSpaceRatio()

size_t v8::internal::Heap::OldGenerationToSemiSpaceRatio ( )
static

Definition at line 5092 of file heap.cc.

5092  {
5093  DCHECK(!v8_flags.minor_ms);
5094  // Compute a ratio such that when old gen max capacity is set to the highest
5095  // supported value, young gen max capacity would also be set to the max.
5096  const size_t max_semi_space_size = DefaultMaxSemiSpaceSize();
5097  DCHECK_GT(max_semi_space_size, 0);
5098  return DefaulMaxHeapSize() / max_semi_space_size;
5099 }

References v8::internal::DCHECK(), DCHECK_GT, DefaulMaxHeapSize(), DefaultMaxSemiSpaceSize(), and v8::internal::v8_flags.

Referenced by YoungGenerationSizeFromOldGenerationSize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldGenerationToSemiSpaceRatioLowMemory()

size_t v8::internal::Heap::OldGenerationToSemiSpaceRatioLowMemory ( )
static

Definition at line 5102 of file heap.cc.

5102  {
5103  static const size_t old_generation_to_semi_space_ratio_low_memory =
5105  return old_generation_to_semi_space_ratio_low_memory /
5106  (v8_flags.minor_ms ? 2 : 1);
5107 }

References HeapLimitMultiplier(), kPointerMultiplier, and v8::internal::v8_flags.

Referenced by YoungGenerationSizeFromOldGenerationSize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldGenerationWastedBytes()

size_t v8::internal::Heap::OldGenerationWastedBytes ( ) const

Definition at line 5371 of file heap.cc.

5371  {
5372  PagedSpaceIterator spaces(this);
5373  size_t total = 0;
5374  for (PagedSpace* space = spaces.Next(); space != nullptr;
5375  space = spaces.Next()) {
5376  total += space->Waste();
5377  }
5378  return total;
5379 }

References v8::internal::PagedSpaceIterator::Next(), and space().

Referenced by GlobalWastedBytes(), MarkCompact(), OldGenerationConsumedBytes(), RecomputeLimitsAfterLoadingIfNeeded(), v8::internal::IncrementalMarking::Start(), and v8::internal::IncrementalMarking::Stop().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldSpaceAllocationLimitAddress()

Address * v8::internal::Heap::OldSpaceAllocationLimitAddress ( )
inline

Definition at line 176 of file heap-inl.h.

176  {
178 }
Address * allocation_limit_address() const

References v8::internal::MainAllocator::allocation_limit_address(), allocator(), and v8::internal::HeapAllocator::old_space_allocator().

Referenced by v8::internal::WasmTrustedInstanceData::New().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldSpaceAllocationTopAddress()

Address * v8::internal::Heap::OldSpaceAllocationTopAddress ( )
inline

Definition at line 172 of file heap-inl.h.

172  {
174 }
Address * allocation_top_address() const

References v8::internal::MainAllocator::allocation_top_address(), allocator(), and v8::internal::HeapAllocator::old_space_allocator().

Referenced by v8::internal::WasmTrustedInstanceData::New().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OnMoveEvent()

void v8::internal::Heap::OnMoveEvent ( Tagged< HeapObject source,
Tagged< HeapObject target,
int  size_in_bytes 
)

Definition at line 3483 of file heap.cc.

3484  {
3485  if (heap_profiler()->is_tracking_object_moves()) {
3486  heap_profiler()->ObjectMoveEvent(source.address(), target.address(),
3487  size_in_bytes,
3488  /*is_embedder_object=*/false);
3489  }
3490  for (auto& tracker : allocation_trackers_) {
3491  tracker->MoveEvent(source.address(), target.address(), size_in_bytes);
3492  }
3493  if (IsSharedFunctionInfo(target, isolate_)) {
3494  LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source.address(),
3495  target.address()));
3496  } else if (IsNativeContext(target, isolate_)) {
3497  if (isolate_->current_embedder_state() != nullptr) {
3498  isolate_->current_embedder_state()->OnMoveEvent(source.address(),
3499  target.address());
3500  }
3501  PROFILE(isolate_,
3502  NativeContextMoveEvent(source.address(), target.address()));
3503  } else if (IsMap(target, isolate_)) {
3504  LOG(isolate_, MapMoveEvent(Cast<Map>(source), Cast<Map>(target)));
3505  }
3506 }
void ObjectMoveEvent(Address from, Address to, int size, bool is_native_object)
#define LOG(isolate, Call)
Definition: log.h:78
#define LOG_CODE_EVENT(isolate, Call)
Definition: log.h:83

References v8::internal::Tagged< HeapObject >::address(), allocation_trackers_, heap_profiler(), isolate_, LOG, LOG_CODE_EVENT, v8::internal::HeapProfiler::ObjectMoveEvent(), and PROFILE.

Referenced by LeftTrimFixedArray(), v8::internal::Scavenger::MigrateObject(), and v8::internal::ProfilingMigrationObserver::Move().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ operator=()

Heap& v8::internal::Heap::operator= ( const Heap )
privatedelete

◆ overridden_stack_state()

std::optional< StackState > v8::internal::Heap::overridden_stack_state ( ) const

Definition at line 6243 of file heap.cc.

6243  {
6244  if (!embedder_stack_state_origin_) return {};
6245  return embedder_stack_state_;
6246 }
std::optional< EmbedderStackStateOrigin > embedder_stack_state_origin_
Definition: heap.h:2375

References embedder_stack_state_, and embedder_stack_state_origin_.

Referenced by v8::internal::CppHeap::DetachIsolate(), and v8::internal::CppHeap::overridden_stack_state().

+ Here is the caller graph for this function:

◆ paged_new_space()

PagedNewSpace * v8::internal::Heap::paged_new_space ( ) const
inline

Definition at line 426 of file heap-inl.h.

426  {
427  return PagedNewSpace::From(new_space());
428 }
static PagedNewSpace * From(NewSpace *space)
Definition: new-spaces.h:598

References v8::internal::PagedNewSpace::From(), and new_space().

Referenced by EnsureSweepingCompleted(), EnsureYoungSweepingCompleted(), v8::internal::Sweeper::PrepareToBeSweptPage(), ReduceNewSpaceSize(), v8::internal::MinorMarkSweepCollector::StartMarking(), StartMinorMSConcurrentMarkingIfNeeded(), StartResizeNewSpace(), v8::internal::MinorMarkSweepCollector::StartSweepNewSpace(), v8::internal::MinorMarkSweepCollector::TraceFragmentation(), v8::internal::MarkCompactCollector::VerifyMarking(), and YoungGenerationWastedBytes().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ paged_space()

PagedSpace * v8::internal::Heap::paged_space ( int  idx) const
inline

Definition at line 152 of file heap-inl.h.

152  {
153  DCHECK(idx == OLD_SPACE || idx == CODE_SPACE || idx == SHARED_SPACE ||
154  idx == TRUSTED_SPACE || idx == SHARED_TRUSTED_SPACE);
155  return static_cast<PagedSpace*>(space_[idx].get());
156 }
std::unique_ptr< Space > space_[LAST_SPACE+1]
Definition: heap.h:2229

References v8::internal::CODE_SPACE, v8::internal::DCHECK(), v8::internal::OLD_SPACE, v8::internal::SHARED_SPACE, v8::internal::SHARED_TRUSTED_SPACE, space_, and v8::internal::TRUSTED_SPACE.

Referenced by v8::internal::MainAllocator::FreeLinearAllocationAreaAndResetFreeList(), v8::internal::PagedSpaceIterator::Next(), v8::internal::Sweeper::PrepareToBeSweptPage(), and v8::internal::PagedSpaceAllocatorPolicy::RefillLab().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PauseConcurrentThreadsInClients()

std::vector< Isolate * > v8::internal::Heap::PauseConcurrentThreadsInClients ( GarbageCollector  collector)
private

Definition at line 2449 of file heap.cc.

2450  {
2451  std::vector<Isolate*> paused_clients;
2452 
2453  if (isolate()->is_shared_space_isolate()) {
2455  [collector, &paused_clients](Isolate* client) {
2456  CHECK(client->heap()->deserialization_complete());
2457 
2458  if (v8_flags.concurrent_marking &&
2459  client->heap()->concurrent_marking()->Pause()) {
2460  paused_clients.push_back(client);
2461  }
2462 
2463  if (collector == GarbageCollector::MARK_COMPACTOR) {
2464  Sweeper* const client_sweeper = client->heap()->sweeper();
2465  client_sweeper->ContributeAndWaitForPromotedPagesIteration();
2466  }
2467  });
2468  }
2469 
2470  return paused_clients;
2471 }
friend class Sweeper
Definition: heap.h:2533

References CHECK, concurrent_marking(), v8::internal::Sweeper::ContributeAndWaitForPromotedPagesIteration(), deserialization_complete(), v8::internal::Isolate::global_safepoint(), v8::internal::Isolate::heap(), isolate(), v8::internal::GlobalSafepoint::IterateClientIsolates(), v8::internal::MARK_COMPACTOR, v8::internal::ConcurrentMarking::Pause(), sweeper(), and v8::internal::v8_flags.

Referenced by PerformGarbageCollection(), and StartIncrementalMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PercentToGlobalMemoryLimit()

double v8::internal::Heap::PercentToGlobalMemoryLimit ( ) const
private

Definition at line 5628 of file heap.cc.

5628  {
5631 }
V8_EXPORT_PRIVATE size_t GlobalConsumedBytesAtLastGC() const
Definition: heap.cc:5439
double PercentToLimit(size_t size_at_gc, size_t size_now, size_t limit)
Definition: heap.cc:5607

References global_allocation_limit(), GlobalConsumedBytes(), GlobalConsumedBytesAtLastGC(), and v8::internal::anonymous_namespace{heap.cc}::PercentToLimit().

Referenced by IncrementalMarkingLimitReached().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PercentToOldGenerationLimit()

double v8::internal::Heap::PercentToOldGenerationLimit ( ) const
private

Definition at line 5622 of file heap.cc.

References old_generation_allocation_limit(), OldGenerationConsumedBytes(), OldGenerationConsumedBytesAtLastGC(), and v8::internal::anonymous_namespace{heap.cc}::PercentToLimit().

Referenced by IncrementalMarkingLimitReached().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PerformGarbageCollection()

void v8::internal::Heap::PerformGarbageCollection ( GarbageCollector  collector,
GarbageCollectionReason  gc_reason,
const char *  collector_reason 
)
private

Definition at line 2299 of file heap.cc.

2301  {
2302  if (IsYoungGenerationCollector(collector)) {
2303  if (v8_flags.sticky_mark_bits) {
2305  // TODO(333906585): It's not necessary to complete full sweeping here.
2306  // Make sure that only the OLD_SPACE is swept.
2308  } else {
2310  if (v8_flags.verify_heap) {
2311  // If heap verification is enabled, we want to ensure that sweeping is
2312  // completed here, as it will be triggered from Heap::Verify anyway.
2313  // In this way, sweeping finalization is accounted to the corresponding
2314  // full GC cycle.
2316  }
2317  }
2318  } else {
2321  }
2322 
2323  const base::TimeTicks atomic_pause_start_time = base::TimeTicks::Now();
2324 
2325  std::optional<SafepointScope> safepoint_scope;
2326  {
2327  AllowGarbageCollection allow_shared_gc;
2328  safepoint_scope.emplace(isolate(), kGlobalSafepointForSharedSpaceIsolate);
2329  }
2330 
2331  if (!incremental_marking_->IsMarking() ||
2332  (collector == GarbageCollector::SCAVENGER)) {
2333  tracer()->StartCycle(collector, gc_reason, collector_reason,
2335  }
2336 
2337  tracer()->StartAtomicPause();
2338  if ((!Heap::IsYoungGenerationCollector(collector) || v8_flags.minor_ms) &&
2339  incremental_marking_->IsMarking()) {
2341  incremental_marking_->IsMinorMarking());
2342  tracer()->UpdateCurrentEvent(gc_reason, collector_reason);
2343  }
2344 
2345  DCHECK(tracer()->IsConsistentWithCollector(collector));
2347 
2348  collection_barrier_->StopTimeToCollectionTimer();
2349 
2350  std::vector<Isolate*> paused_clients =
2352 
2354 
2355  tracer()->StartInSafepoint(atomic_pause_start_time);
2356 
2358 
2360 
2361  const size_t start_young_generation_size =
2363 
2364  // Make sure allocation observers are disabled until the new new space
2365  // capacity is set in the epilogue.
2366  PauseAllocationObserversScope pause_observers(this);
2367 
2368  const size_t new_space_capacity_before_gc = NewSpaceTargetCapacity();
2369 
2370  if (collector == GarbageCollector::MARK_COMPACTOR) {
2371  MarkCompact();
2372  } else if (collector == GarbageCollector::MINOR_MARK_SWEEPER) {
2373  MinorMarkSweep();
2374  } else {
2376  Scavenge();
2377  }
2378 
2379  // We don't want growing or shrinking of the current cycle to affect
2380  // pretenuring decisions. The numbers collected in the GC will be for the
2381  // capacity that was set before the GC.
2382  pretenuring_handler_.ProcessPretenuringFeedback(new_space_capacity_before_gc);
2383 
2384  UpdateSurvivalStatistics(static_cast<int>(start_young_generation_size));
2386 
2387  isolate_->counters()->objs_since_last_young()->Set(0);
2388 
2390 
2391  // Update relocatables.
2393 
2395  // Allows handle derefs for all threads/isolates from this thread.
2396  AllowHandleUsageOnAllThreads allow_all_handle_derefs;
2399  });
2400  }
2401 
2402  // First round weak callbacks are not supposed to allocate and trigger
2403  // nested GCs.
2405 
2406  if (cpp_heap() && (collector == GarbageCollector::MARK_COMPACTOR ||
2407  collector == GarbageCollector::MINOR_MARK_SWEEPER)) {
2408  // TraceEpilogue may trigger operations that invalidate global handles. It
2409  // has to be called *after* all other operations that potentially touch
2410  // and reset global handles. It is also still part of the main garbage
2411  // collection pause and thus needs to be called *before* any operation
2412  // that can potentially trigger recursive garbage collections.
2413  TRACE_GC(tracer(), GCTracer::Scope::HEAP_EMBEDDER_TRACING_EPILOGUE);
2415  }
2416 
2417  if (collector == GarbageCollector::MARK_COMPACTOR) {
2419  }
2420 
2422 
2424 
2425  const base::TimeTicks atomic_pause_end_time = base::TimeTicks::Now();
2426  tracer()->StopInSafepoint(atomic_pause_end_time);
2427 
2428  ResumeConcurrentThreadsInClients(std::move(paused_clients));
2429 
2430  // After every full GC the old generation allocation limit should be
2431  // configured.
2433  !using_initial_limit());
2434 }
void StopInSafepoint(base::TimeTicks time)
Definition: gc-tracer.cc:323
void UpdateCurrentEvent(GarbageCollectionReason gc_reason, const char *collector_reason)
Definition: gc-tracer.cc:211
void StartInSafepoint(base::TimeTicks time)
Definition: gc-tracer.cc:308
void StartCycle(GarbageCollector collector, GarbageCollectionReason gc_reason, const char *collector_reason, MarkingType marking)
Definition: gc-tracer.cc:228
std::vector< Isolate * > PauseConcurrentThreadsInClients(GarbageCollector collector)
Definition: heap.cc:2449
void CompleteSweepingYoung()
Definition: heap.cc:2523
void PerformHeapVerification()
Definition: heap.cc:2436
void Scavenge()
Definition: heap.cc:2800
void GarbageCollectionEpilogueInSafepoint(GarbageCollector collector)
Definition: heap.cc:1074
size_t NewSpaceSize()
Definition: heap.cc:4036
void MarkCompact()
Definition: heap.cc:2733
V8_EXPORT_PRIVATE void CompleteSweepingFull()
Definition: heap.cc:2022
void GarbageCollectionPrologueInSafepoint(GarbageCollector collector)
Definition: heap.cc:989
friend class PauseAllocationObserversScope
Definition: heap.h:2521
void ShrinkOldGenerationAllocationLimitIfNotConfigured()
Definition: heap.cc:3225
void MinorMarkSweep()
Definition: heap.cc:2768
void ResumeConcurrentThreadsInClients(std::vector< Isolate * > paused_clients)
Definition: heap.cc:2473
void UpdateSurvivalStatistics(int start_new_space_size)
Definition: heap.cc:2247
bool is_shared_space_isolate() const
Definition: isolate.h:2332
void ProcessPretenuringFeedback(size_t new_space_capacity_before_gc)
static void PostGarbageCollectionProcessing(Isolate *isolate)
Definition: objects.cc:4116
#define TRACE_GC_EPOCH(tracer, scope_id, thread_kind)
Definition: gc-tracer.h:77
GCTracer::Scope::ScopeId CollectorScopeId(GarbageCollector collector)
Definition: heap.cc:2271
void ClearStubCaches(Isolate *isolate)
Definition: heap.cc:2283

References v8::internal::anonymous_namespace{heap.cc}::ClearStubCaches(), collection_barrier_, v8::internal::anonymous_namespace{heap.cc}::CollectorScopeId(), v8::internal::CppHeap::CompactAndSweep(), CompleteSweepingFull(), CompleteSweepingYoung(), v8::internal::Isolate::counters(), cpp_heap(), v8::internal::DCHECK(), DCHECK_EQ, DCHECK_IMPLIES, v8::internal::Isolate::eternal_handles(), FreeLinearAllocationAreas(), v8::internal::CppHeap::From(), GarbageCollectionEpilogueInSafepoint(), GarbageCollectionPrologueInSafepoint(), v8::internal::Isolate::global_handles(), v8::internal::Isolate::global_safepoint(), incremental_marking_, v8::internal::GlobalHandles::InvokeFirstPassWeakCallbacks(), v8::internal::Isolate::is_shared_space_isolate(), isolate(), isolate_, IsYoungGenerationCollector(), v8::internal::GlobalSafepoint::IterateClientIsolates(), v8::internal::GCTracer::kAtomic, v8::internal::kGlobalSafepointForSharedSpaceIsolate, v8::internal::kMain, v8::internal::MARK_COMPACTOR, MarkCompact(), v8::internal::MINOR_MARK_SWEEPER, MinorMarkSweep(), new_lo_space(), NewSpaceSize(), NewSpaceTargetCapacity(), v8::base::TimeTicks::Now(), PauseConcurrentThreadsInClients(), PerformHeapVerification(), v8::internal::EternalHandles::PostGarbageCollectionProcessing(), v8::internal::Relocatable::PostGarbageCollectionProcessing(), pretenuring_handler_, v8::internal::PretenuringHandler::ProcessPretenuringFeedback(), ResumeConcurrentThreadsInClients(), Scavenge(), v8::internal::SCAVENGER, ShrinkOldGenerationAllocationLimitIfNotConfigured(), v8::internal::LargeObjectSpace::SizeOfObjects(), v8::internal::GCTracer::StartAtomicPause(), v8::internal::GCTracer::StartCycle(), v8::internal::GCTracer::StartInSafepoint(), v8::internal::GCTracer::StopInSafepoint(), TRACE_GC, TRACE_GC_EPOCH, tracer(), v8::internal::GCTracer::UpdateCurrentEvent(), UpdateSurvivalStatistics(), using_initial_limit(), and v8::internal::v8_flags.

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PerformHeapVerification()

void v8::internal::Heap::PerformHeapVerification ( )
private

Definition at line 2436 of file heap.cc.

2436  {
2438 
2439  if (isolate()->is_shared_space_isolate()) {
2440  // Allow handle creation for client isolates even if they are parked. This
2441  // is because some object verification methods create handles.
2442  AllowHandleUsageOnAllThreads allow_handle_creation;
2444  HeapVerifier::VerifyHeapIfEnabled(client->heap());
2445  });
2446  }
2447 }
static void VerifyHeapIfEnabled(Heap *heap)
Definition: heap-verifier.h:78

References v8::internal::Isolate::global_safepoint(), v8::internal::Isolate::heap(), isolate(), v8::internal::GlobalSafepoint::IterateClientIsolates(), and v8::internal::HeapVerifier::VerifyHeapIfEnabled().

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PostFinalizationRegistryCleanupTaskIfNeeded()

void v8::internal::Heap::PostFinalizationRegistryCleanupTaskIfNeeded ( )

Definition at line 7039 of file heap.cc.

7039  {
7040  // Only one cleanup task is posted at a time.
7043  return;
7044  }
7045  auto task = std::make_unique<FinalizationRegistryCleanupTask>(this);
7046  task_runner_->PostNonNestableTask(std::move(task));
7048 }

References HasDirtyJSFinalizationRegistries(), is_finalization_registry_cleanup_task_posted_, and task_runner_.

Referenced by v8::internal::FinalizationRegistryCleanupTask::RunInternal().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PrecedeWithFiller()

Tagged< HeapObject > v8::internal::Heap::PrecedeWithFiller ( Tagged< HeapObject object,
int  filler_size 
)

Definition at line 3157 of file heap.cc.

3158  {
3159  CreateFillerObjectAt(object.address(), filler_size);
3160  return HeapObject::FromAddress(object.address() + filler_size);
3161 }

References CreateFillerObjectAt(), and v8::internal::HeapObject::FromAddress().

Referenced by v8::internal::MainAllocator::AllocateFastAligned(), and v8::internal::ReadOnlySpace::TryAllocateLinearlyAligned().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PrecedeWithFillerBackground()

Tagged< HeapObject > v8::internal::Heap::PrecedeWithFillerBackground ( Tagged< HeapObject object,
int  filler_size 
)

Definition at line 3163 of file heap.cc.

3164  {
3166  WritableFreeSpace::ForNonExecutableMemory(object.address(), filler_size));
3167  return HeapObject::FromAddress(object.address() + filler_size);
3168 }

References CreateFillerObjectAtBackground(), v8::internal::WritableFreeSpace::ForNonExecutableMemory(), and v8::internal::HeapObject::FromAddress().

Referenced by AlignWithFillerBackground().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PreciseCollectAllGarbage()

void v8::internal::Heap::PreciseCollectAllGarbage ( GCFlags  gc_flags,
GarbageCollectionReason  gc_reason,
const GCCallbackFlags  gc_callback_flags = kNoGCCallbackFlags 
)

Definition at line 1446 of file heap.cc.

1448  {
1450  CollectAllGarbage(gc_flags, gc_reason, gc_callback_flags);
1451 }
V8_EXPORT_PRIVATE void FinalizeIncrementalMarkingAtomicallyIfRunning(GarbageCollectionReason gc_reason)
Definition: heap.cc:4064

References CollectAllGarbage(), and FinalizeIncrementalMarkingAtomicallyIfRunning().

Referenced by v8::internal::compiler::CompilationDependencies::Commit(), v8::internal::HeapObjectsMap::UpdateHeapObjectsMap(), and wasm::StoreImpl::~StoreImpl().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ pretenuring_handler()

PretenuringHandler* v8::internal::Heap::pretenuring_handler ( )
inline

Definition at line 1683 of file heap.h.

1683 { return &pretenuring_handler_; }

Referenced by v8::internal::ConcurrentMarking::FlushPretenuringFeedback(), and v8::internal::MinorMarkSweepCollector::MarkLiveObjects().

+ Here is the caller graph for this function:

◆ PrintFreeListsStats()

void v8::internal::Heap::PrintFreeListsStats ( )

Definition at line 678 of file heap.cc.

678  {
679  DCHECK(v8_flags.trace_gc_freelists);
680 
681  if (v8_flags.trace_gc_freelists_verbose) {
683  "Freelists statistics per Page: "
684  "[category: length || total free bytes]\n");
685  }
686 
687  std::vector<int> categories_lengths(
688  old_space()->free_list()->number_of_categories(), 0);
689  std::vector<size_t> categories_sums(
690  old_space()->free_list()->number_of_categories(), 0);
691  unsigned int pageCnt = 0;
692 
 693  // This loop computes freelist lengths and sums.
694  // If v8_flags.trace_gc_freelists_verbose is enabled, it also prints
695  // the stats of each FreeListCategory of each Page.
696  for (PageMetadata* page : *old_space()) {
697  std::ostringstream out_str;
698 
699  if (v8_flags.trace_gc_freelists_verbose) {
700  out_str << "Page " << std::setw(4) << pageCnt;
701  }
702 
703  for (int cat = kFirstCategory;
704  cat <= old_space()->free_list()->last_category(); cat++) {
705  FreeListCategory* free_list =
706  page->free_list_category(static_cast<FreeListCategoryType>(cat));
707  int length = free_list->FreeListLength();
708  size_t sum = free_list->SumFreeList();
709 
710  if (v8_flags.trace_gc_freelists_verbose) {
711  out_str << "[" << cat << ": " << std::setw(4) << length << " || "
712  << std::setw(6) << sum << " ]"
713  << (cat == old_space()->free_list()->last_category() ? "\n"
714  : ", ");
715  }
716  categories_lengths[cat] += length;
717  categories_sums[cat] += sum;
718  }
719 
720  if (v8_flags.trace_gc_freelists_verbose) {
721  PrintIsolate(isolate_, "%s", out_str.str().c_str());
722  }
723 
724  pageCnt++;
725  }
726 
727  // Print statistics about old_space (pages, free/wasted/used memory...).
728  PrintIsolate(
729  isolate_,
730  "%d pages. Free space: %.1f MB (waste: %.2f). "
731  "Usage: %.1f/%.1f (MB) -> %.2f%%.\n",
732  pageCnt, static_cast<double>(old_space_->Available()) / MB,
733  static_cast<double>(old_space_->Waste()) / MB,
734  static_cast<double>(old_space_->Size()) / MB,
735  static_cast<double>(old_space_->Capacity()) / MB,
736  static_cast<double>(old_space_->Size()) / old_space_->Capacity() * 100);
737 
738  // Print global statistics of each FreeListCategory (length & sum).
740  "FreeLists global statistics: "
741  "[category: length || total free KB]\n");
742  std::ostringstream out_str;
743  for (int cat = kFirstCategory;
744  cat <= old_space()->free_list()->last_category(); cat++) {
745  out_str << "[" << cat << ": " << categories_lengths[cat] << " || "
746  << std::fixed << std::setprecision(2)
747  << static_cast<double>(categories_sums[cat]) / KB << " KB]"
748  << (cat == old_space()->free_list()->last_category() ? "\n" : ", ");
749  }
750  PrintIsolate(isolate_, "%s", out_str.str().c_str());
751 }
size_t Available() const override
size_t Size() const override
Definition: paged-spaces.h:170
static constexpr FreeListCategoryType kFirstCategory
Definition: free-list.h:39
int32_t FreeListCategoryType
Definition: free-list.h:37
void PrintIsolate(void *isolate, const char *format,...)
Definition: utils.cc:61

References v8::internal::PagedSpaceBase::Available(), v8::internal::PagedSpaceBase::Capacity(), v8::internal::DCHECK(), v8::internal::FreeListCategory::FreeListLength(), isolate_, v8::internal::KB, v8::internal::kFirstCategory, v8::internal::length, v8::internal::MB, old_space(), old_space_, v8::internal::PrintIsolate(), v8::internal::PagedSpaceBase::Size(), v8::internal::FreeListCategory::SumFreeList(), v8::internal::v8_flags, and v8::internal::PagedSpaceBase::Waste().

Referenced by v8::internal::GCTracer::NotifyFullSweepingCompletedAndStopCycleIfFinished(), and v8::internal::GCTracer::StopCycle().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PrintMaxMarkingLimitReached()

void v8::internal::Heap::PrintMaxMarkingLimitReached ( )
private

Definition at line 6118 of file heap.cc.

6118  {
6119  PrintF("\n### Maximum marking limit reached = %.02lf\n",
6120  max_marking_limit_reached_.load(std::memory_order_relaxed));
6121 }

References max_marking_limit_reached_, and v8::internal::PrintF().

Referenced by TearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PrintMaxNewSpaceSizeReached()

void v8::internal::Heap::PrintMaxNewSpaceSizeReached ( )
private

Definition at line 6123 of file heap.cc.

6123  {
6124  PrintF("\n### Maximum new space size reached = %.02lf\n",
6126 }

References v8::internal::StressScavengeObserver::MaxNewSpaceSizeReached(), v8::internal::PrintF(), and stress_scavenge_observer_.

Referenced by TearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PrintShortHeapStatistics()

void v8::internal::Heap::PrintShortHeapStatistics ( )

Definition at line 577 of file heap.cc.

577  {
578  if (!v8_flags.trace_gc_verbose) return;
580  "Memory allocator, used: %6zu KB"
581  ", available: %7zu KB\n",
582  memory_allocator()->Size() / KB,
583  memory_allocator()->Available() / KB);
585  "Read-only space, used: %6zu KB"
586  ", available: %7zu KB"
587  ", committed: %6zu KB\n",
588  read_only_space_->Size() / KB, size_t{0},
591  "New space, used: %6zu KB"
592  ", available:%c %7zu KB"
593  ", committed: %6zu KB\n",
594  NewSpaceSize() / KB,
595  (v8_flags.minor_ms && minor_sweeping_in_progress()) ? '*' : ' ',
596  new_space_->Available() / KB,
597  new_space_->CommittedMemory() / KB);
599  "New large object space, used: %6zu KB"
600  ", available: %7zu KB"
601  ", committed: %6zu KB\n",
606  "Old space, used: %6zu KB"
607  ", available:%c %7zu KB"
608  ", committed: %6zu KB\n",
609  old_space_->SizeOfObjects() / KB,
610  major_sweeping_in_progress() ? '*' : ' ',
611  old_space_->Available() / KB,
612  old_space_->CommittedMemory() / KB);
614  "Code space, used: %6zu KB"
615  ", available:%c %7zu KB"
616  ", committed: %6zu KB\n",
617  code_space_->SizeOfObjects() / KB,
618  major_sweeping_in_progress() ? '*' : ' ',
619  code_space_->Available() / KB,
620  code_space_->CommittedMemory() / KB);
622  "Large object space, used: %6zu KB"
623  ", available: %7zu KB"
624  ", committed: %6zu KB\n",
628  "Code large object space, used: %6zu KB"
629  ", available: %7zu KB"
630  ", committed: %6zu KB\n",
635  "Trusted space, used: %6zu KB"
636  ", available:%c %7zu KB"
637  ", committed: %6zu KB\n",
638  trusted_space_->SizeOfObjects() / KB,
639  major_sweeping_in_progress() ? '*' : ' ',
641  trusted_space_->CommittedMemory() / KB);
643  "Trusted large object space, used: %6zu KB"
644  ", available: %7zu KB"
645  ", committed: %6zu KB\n",
649  ReadOnlySpace* const ro_space = read_only_space_;
651  "All spaces, used: %6zu KB"
652  ", available:%c %7zu KB"
653  ", committed: %6zu KB\n",
654  (this->SizeOfObjects() + ro_space->Size()) / KB,
655  sweeping_in_progress() ? '*' : ' ', (this->Available()) / KB,
656  (this->CommittedMemory() + ro_space->CommittedMemory()) / KB);
657  const size_t chunks = memory_allocator()->GetPooledChunksCount();
658  PrintIsolate(isolate_, "Pool buffering %4zu chunk(s) of committed: %7zu KB\n",
659  chunks, (chunks * PageMetadata::kPageSize) / KB);
661  "External memory reported: %7" PRId64 " KB\n",
662  external_memory() / KB);
664  "Backing store memory: %7" PRIu64 " KB\n",
665  backing_store_bytes() / KB);
666  PrintIsolate(isolate_, "External memory global: %7zu KB\n",
669  "Total time spent in GC: %7.1f ms\n",
671  if (sweeping_in_progress()) {
673  "(*) Sweeping is still in progress, making available sizes "
674  "inaccurate.\n");
675  }
676 }
double InMillisecondsF() const
Definition: time.cc:226
size_t Available()
Definition: heap.cc:400
base::TimeDelta total_gc_time_ms_
Definition: heap.h:2324
uint64_t backing_store_bytes() const
Definition: heap.h:668
GetExternallyAllocatedMemoryInBytesCallback external_memory_callback_
Definition: heap.h:2309
size_t Available() const override
Definition: large-spaces.cc:60
V8_EXPORT_PRIVATE size_t GetPooledChunksCount()
size_t Available() const override
size_t Size() const override

References Available(), v8::internal::LargeObjectSpace::Available(), v8::internal::NewLargeObjectSpace::Available(), v8::internal::PagedSpaceBase::Available(), backing_store_bytes(), code_lo_space_, code_space_, CommittedMemory(), v8::internal::BaseSpace::CommittedMemory(), external_memory(), external_memory_callback_, v8::internal::MemoryAllocator::GetPooledChunksCount(), v8::base::TimeDelta::InMillisecondsF(), isolate_, v8::internal::KB, v8::internal::MutablePageMetadata::kPageSize, lo_space_, major_sweeping_in_progress(), memory_allocator(), minor_sweeping_in_progress(), new_lo_space_, new_space_, NewSpaceSize(), old_space_, v8::internal::PrintIsolate(), read_only_space_, v8::internal::ReadOnlySpace::Size(), SizeOfObjects(), v8::internal::LargeObjectSpace::SizeOfObjects(), sweeping_in_progress(), total_gc_time_ms_, trusted_lo_space_, trusted_space_, and v8::internal::v8_flags.

Referenced by v8::internal::GCTracer::StopObservablePause().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ProcessAllocationSites()

void v8::internal::Heap::ProcessAllocationSites ( WeakObjectRetainer retainer)
private

Definition at line 3026 of file heap.cc.

3026  {
3027  Tagged<Object> allocation_site_obj =
3029  retainer);
3031  Cast<UnionOf<Undefined, AllocationSiteWithWeakNext>>(
3032  allocation_site_obj));
3033 }
template Tagged< Object > VisitWeakList< AllocationSiteWithWeakNext >(Heap *heap, Tagged< Object > list, WeakObjectRetainer *retainer)
requires HasCastImplementation< Holder, To, From > Holder< To > Cast(Holder< From > value, const v8::SourceLocation &loc=INIT_SOURCE_LOCATION_IN_DEBUG)
Definition: casting.h:122

References allocation_sites_list(), v8::internal::Cast(), set_allocation_sites_list(), and v8::internal::VisitWeakList< AllocationSiteWithWeakNext >().

Referenced by ProcessAllWeakReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ProcessAllWeakReferences()

void v8::internal::Heap::ProcessAllWeakReferences ( WeakObjectRetainer retainer)
private

Definition at line 3013 of file heap.cc.

3013  {
3014  ProcessNativeContexts(retainer);
3015  ProcessAllocationSites(retainer);
3017 }
void ProcessNativeContexts(WeakObjectRetainer *retainer)
Definition: heap.cc:3019
void ProcessAllocationSites(WeakObjectRetainer *retainer)
Definition: heap.cc:3026
void ProcessDirtyJSFinalizationRegistries(WeakObjectRetainer *retainer)
Definition: heap.cc:3035

References ProcessAllocationSites(), ProcessDirtyJSFinalizationRegistries(), and ProcessNativeContexts().

+ Here is the call graph for this function:

◆ ProcessDirtyJSFinalizationRegistries()

void v8::internal::Heap::ProcessDirtyJSFinalizationRegistries ( WeakObjectRetainer retainer)
private

Definition at line 3035 of file heap.cc.

3035  {
3037  this, dirty_js_finalization_registries_list(), retainer);
3039  // If the list is empty, set the tail to undefined. Otherwise the tail is set
3040  // by WeakListVisitor<JSFinalizationRegistry>::VisitLiveObject.
3041  if (IsUndefined(head, isolate())) {
3043  }
3044 }
template Tagged< Object > VisitWeakList< JSFinalizationRegistry >(Heap *heap, Tagged< Object > list, WeakObjectRetainer *retainer)

References dirty_js_finalization_registries_list(), isolate(), set_dirty_js_finalization_registries_list(), set_dirty_js_finalization_registries_list_tail(), and v8::internal::VisitWeakList< JSFinalizationRegistry >().

Referenced by ProcessAllWeakReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ProcessNativeContexts()

void v8::internal::Heap::ProcessNativeContexts ( WeakObjectRetainer retainer)
private

Definition at line 3019 of file heap.cc.

3019  {
3020  Tagged<Object> head =
3021  VisitWeakList<Context>(this, native_contexts_list(), retainer);
3022  // Update the head of the list of contexts.
3024 }
template Tagged< Object > VisitWeakList< Context >(Heap *heap, Tagged< Object > list, WeakObjectRetainer *retainer)

References native_contexts_list(), set_native_contexts_list(), and v8::internal::VisitWeakList< Context >().

Referenced by ProcessAllWeakReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ProcessWeakListRoots()

void v8::internal::Heap::ProcessWeakListRoots ( WeakObjectRetainer retainer)
private

Definition at line 3046 of file heap.cc.

3046  {
3047  set_native_contexts_list(retainer->RetainAs(native_contexts_list()));
3049  Cast<UnionOf<Smi, Undefined, AllocationSiteWithWeakNext>>(
3050  retainer->RetainAs(allocation_sites_list())));
3052  retainer->RetainAs(dirty_js_finalization_registries_list()));
3054  retainer->RetainAs(dirty_js_finalization_registries_list_tail()));
3055 }

References allocation_sites_list(), v8::internal::Cast(), dirty_js_finalization_registries_list(), dirty_js_finalization_registries_list_tail(), native_contexts_list(), v8::internal::WeakObjectRetainer::RetainAs(), set_allocation_sites_list(), set_dirty_js_finalization_registries_list(), set_dirty_js_finalization_registries_list_tail(), and set_native_contexts_list().

+ Here is the call graph for this function:

◆ promoted_objects_size()

size_t v8::internal::Heap::promoted_objects_size ( )
inline

Definition at line 1362 of file heap.h.

1362 { return promoted_objects_size_; }

Referenced by v8::internal::GCTracer::PrintNVP().

+ Here is the caller graph for this function:

◆ PromotedSinceLastGC()

size_t v8::internal::Heap::PromotedSinceLastGC ( )
inline, private

Definition at line 2126 of file heap.h.

2126  {
2127  size_t old_generation_size = OldGenerationSizeOfObjects();
2128  return old_generation_size > old_generation_size_at_last_gc_
2129  ? old_generation_size - old_generation_size_at_last_gc_
2130  : 0;
2131  }

◆ PublishMainThreadPendingAllocations()

void v8::internal::Heap::PublishMainThreadPendingAllocations ( )

Definition at line 1040 of file heap.cc.

1040  {
1042 }

References allocator(), and v8::internal::HeapAllocator::PublishPendingAllocations().

Referenced by v8::internal::maglev::MaglevCompilationInfo::MaglevCompilationInfo(), v8::internal::compiler::PipelineCompilationJob::PrepareJobImpl(), and v8::internal::IncrementalMarking::Step().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ReachedHeapLimit()

bool v8::internal::Heap::ReachedHeapLimit ( )
private

Definition at line 1809 of file heap.cc.

1809 { return !CanExpandOldGeneration(0); }

References CanExpandOldGeneration().

Referenced by CheckHeapLimitReached(), CollectAllAvailableGarbage(), and CollectGarbageWithRetry().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ read_only_space()

◆ RecomputeLimits()

void v8::internal::Heap::RecomputeLimits ( GarbageCollector  collector,
base::TimeTicks  time 
)
private

Definition at line 2614 of file heap.cc.

2614  {
2615  if (IsYoungGenerationCollector(collector) &&
2617  return;
2618  }
2619  if (using_initial_limit()) {
2620  DCHECK(IsYoungGenerationCollector(collector));
2621  return;
2622  }
2623 
2624  auto new_limits = ComputeNewAllocationLimits(this);
2625  size_t new_old_generation_allocation_limit =
2626  new_limits.old_generation_allocation_limit;
2627  size_t new_global_allocation_limit = new_limits.global_allocation_limit;
2628 
2629  if (collector == GarbageCollector::MARK_COMPACTOR) {
2630  if (v8_flags.memory_balancer) {
2631  // Now recompute the new allocation limit.
2632  mb_->RecomputeLimits(new_limits.global_allocation_limit -
2633  new_limits.old_generation_allocation_limit,
2634  time);
2635  } else {
2637  new_limits.old_generation_allocation_limit,
2638  new_limits.global_allocation_limit);
2639  }
2640 
2643  tracer()->AverageMarkCompactMutatorUtilization());
2644  } else {
2646  new_old_generation_allocation_limit = std::min(
2647  new_old_generation_allocation_limit, old_generation_allocation_limit());
2648  new_global_allocation_limit =
2649  std::min(new_global_allocation_limit, global_allocation_limit());
2651  new_old_generation_allocation_limit, new_global_allocation_limit);
2652  }
2653 
2657 }
void CheckIneffectiveMarkCompact(size_t old_generation_size, double mutator_utilization)
Definition: heap.cc:3861
std::unique_ptr< MemoryBalancer > mb_
Definition: heap.h:2464
#define CHECK_GE(lhs, rhs)

References CHECK_EQ, CHECK_GE, CheckIneffectiveMarkCompact(), ComputeNewAllocationLimits(), v8::internal::DCHECK(), global_allocation_limit(), v8::internal::anonymous_namespace{heap.cc}::GlobalMemorySizeFromV8Size(), HasLowYoungGenerationAllocationRate(), IsYoungGenerationCollector(), v8::internal::MARK_COMPACTOR, max_global_memory_size_, max_old_generation_size_, mb_, old_generation_allocation_limit(), old_generation_allocation_limit_, OldGenerationConsumedBytes(), SetOldGenerationAndGlobalAllocationLimit(), v8::internal::time, tracer(), using_initial_limit(), and v8::internal::v8_flags.

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RecomputeLimitsAfterLoadingIfNeeded()

void v8::internal::Heap::RecomputeLimitsAfterLoadingIfNeeded ( )
private

Definition at line 2659 of file heap.cc.

2659  {
2661  return;
2662  }
2663 
2664  if ((OldGenerationSpaceAvailable() > 0) && (GlobalMemoryAvailable() > 0)) {
2665  // Only recompute limits if memory accumulated during loading may lead to
2666  // atomic GC. If there is still room to allocate, keep the current limits.
2669  return;
2670  }
2671 
2672  if (!incremental_marking()->IsMajorMarking()) {
2673  // Incremental marking should have started already but was delayed. Don't
2674  // update the limits yet to not delay starting incremental marking any
2675  // further. Limits will be updated on incremental marking start, with the
2676  // intention to give more slack and avoid an immediate large finalization
2677  // pause.
2678  return;
2679  }
2680 
2682 
2688  set_using_initial_limit(false);
2689 
2690  auto new_limits = ComputeNewAllocationLimits(this);
2691  size_t new_old_generation_allocation_limit =
2692  new_limits.old_generation_allocation_limit;
2693  size_t new_global_allocation_limit = new_limits.global_allocation_limit;
2694 
2695  new_old_generation_allocation_limit = std::max(
2696  new_old_generation_allocation_limit, old_generation_allocation_limit());
2697  new_global_allocation_limit =
2698  std::max(new_global_allocation_limit, global_allocation_limit());
2699  SetOldGenerationAndGlobalAllocationLimit(new_old_generation_allocation_limit,
2700  new_global_allocation_limit);
2701 
2705 }
bool AllocationLimitOvershotByLargeMargin() const
Definition: heap.cc:5450

References AllocationLimitOvershotByLargeMargin(), CHECK_EQ, CHECK_GE, ComputeNewAllocationLimits(), v8::internal::DCHECK(), embedder_size_at_last_gc_, EmbedderSizeOfObjects(), external_memory_, global_allocation_limit(), GlobalMemoryAvailable(), v8::internal::anonymous_namespace{heap.cc}::GlobalMemorySizeFromV8Size(), incremental_marking(), max_global_memory_size_, max_old_generation_size_, old_generation_allocation_limit(), old_generation_allocation_limit_, old_generation_size_at_last_gc_, old_generation_wasted_at_last_gc_, OldGenerationSizeOfObjects(), OldGenerationSpaceAvailable(), OldGenerationWastedBytes(), set_using_initial_limit(), SetOldGenerationAndGlobalAllocationLimit(), v8::internal::Heap::ExternalMemoryAccounting::total(), update_allocation_limits_after_loading_, v8::internal::Heap::ExternalMemoryAccounting::UpdateLowSinceMarkCompact(), and UpdateOldGenerationAllocationCounter().

Referenced by NotifyLoadingEnded(), and StartIncrementalMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RecordStats()

void v8::internal::Heap::RecordStats ( HeapStats stats)

Definition at line 5325 of file heap.cc.

5325  {
5326  stats->start_marker = HeapStats::kStartMarker;
5327  stats->end_marker = HeapStats::kEndMarker;
5328  stats->ro_space_size = read_only_space_->Size();
5329  stats->ro_space_capacity = read_only_space_->Capacity();
5330  stats->new_space_size = NewSpaceSize();
5331  stats->new_space_capacity = NewSpaceCapacity();
5332  stats->old_space_size = old_space_->SizeOfObjects();
5333  stats->old_space_capacity = old_space_->Capacity();
5334  stats->code_space_size = code_space_->SizeOfObjects();
5335  stats->code_space_capacity = code_space_->Capacity();
5336  stats->map_space_size = 0;
5337  stats->map_space_capacity = 0;
5338  stats->lo_space_size = lo_space_->Size();
5339  stats->code_lo_space_size = code_lo_space_->Size();
5340  isolate_->global_handles()->RecordStats(stats);
5341  stats->memory_allocator_size = memory_allocator()->Size();
5342  stats->memory_allocator_capacity =
5344  stats->os_error = base::OS::GetLastError();
5345  // TODO(leszeks): Include the string table in both current and peak usage.
5346  stats->malloced_memory = isolate_->allocator()->GetCurrentMemoryUsage();
5347  stats->malloced_peak_memory = isolate_->allocator()->GetMaxMemoryUsage();
5348  GetFromRingBuffer(stats->last_few_messages);
5349 }
static int GetLastError()
void RecordStats(HeapStats *stats)
static const int kStartMarker
Definition: heap.h:2562
static const int kEndMarker
Definition: heap.h:2563
void GetFromRingBuffer(char *buffer)
Definition: heap.cc:5311
AccountingAllocator * allocator()
Definition: isolate.h:2019

References v8::internal::Isolate::allocator(), v8::internal::MemoryAllocator::Available(), v8::internal::PagedSpaceBase::Capacity(), v8::internal::ReadOnlySpace::Capacity(), code_lo_space_, v8::internal::HeapStats::code_lo_space_size, code_space_, v8::internal::HeapStats::code_space_capacity, v8::internal::HeapStats::code_space_size, v8::internal::HeapStats::end_marker, v8::internal::AccountingAllocator::GetCurrentMemoryUsage(), GetFromRingBuffer(), v8::base::OS::GetLastError(), v8::internal::AccountingAllocator::GetMaxMemoryUsage(), v8::internal::Isolate::global_handles(), isolate_, v8::internal::HeapStats::kEndMarker, v8::internal::HeapStats::kStartMarker, v8::internal::HeapStats::last_few_messages, lo_space_, v8::internal::HeapStats::lo_space_size, v8::internal::HeapStats::malloced_memory, v8::internal::HeapStats::malloced_peak_memory, v8::internal::HeapStats::map_space_capacity, v8::internal::HeapStats::map_space_size, memory_allocator(), v8::internal::HeapStats::memory_allocator_capacity, v8::internal::HeapStats::memory_allocator_size, v8::internal::HeapStats::new_space_capacity, v8::internal::HeapStats::new_space_size, NewSpaceCapacity(), NewSpaceSize(), old_space_, v8::internal::HeapStats::old_space_capacity, v8::internal::HeapStats::old_space_size, v8::internal::HeapStats::os_error, read_only_space_, v8::internal::GlobalHandles::RecordStats(), v8::internal::HeapStats::ro_space_capacity, v8::internal::HeapStats::ro_space_size, v8::internal::MemoryAllocator::Size(), v8::internal::LargeObjectSpace::Size(), v8::internal::ReadOnlySpace::Size(), and v8::internal::HeapStats::start_marker.

+ Here is the call graph for this function:

◆ ReduceNewSpaceSize()

void v8::internal::Heap::ReduceNewSpaceSize ( )
private

Definition at line 4025 of file heap.cc.

4025  {
4026  if (!v8_flags.minor_ms) {
4027  const size_t reduced_capacity = ComputeReducedNewSpaceSize(new_space());
4028  semi_space_new_space()->Shrink(reduced_capacity);
4029  } else {
4030  // MinorMS starts shrinking new space as part of sweeping.
4032  }
4033  new_lo_space_->SetCapacity(new_space()->TotalCapacity());
4034 }
void Shrink(size_t new_capacity)
Definition: new-spaces.cc:477
size_t ComputeReducedNewSpaceSize(NewSpace *new_space)
Definition: heap.cc:3967

References v8::internal::anonymous_namespace{heap.cc}::ComputeReducedNewSpaceSize(), v8::internal::PagedNewSpace::FinishShrinking(), new_lo_space_, new_space(), paged_new_space(), semi_space_new_space(), v8::internal::NewLargeObjectSpace::SetCapacity(), v8::internal::SemiSpaceNewSpace::Shrink(), and v8::internal::v8_flags.

Referenced by ReduceNewSpaceSizeForTesting(), and ResizeNewSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ReduceNewSpaceSizeForTesting()

void v8::internal::Heap::ReduceNewSpaceSizeForTesting ( )

Definition at line 4006 of file heap.cc.

4006 { ReduceNewSpaceSize(); }
void ReduceNewSpaceSize()
Definition: heap.cc:4025

References ReduceNewSpaceSize().

+ Here is the call graph for this function:

◆ RegisterExternalString()

void v8::internal::Heap::RegisterExternalString ( Tagged< String string)
inline

Definition at line 213 of file heap-inl.h.

213  {
214  DCHECK(IsExternalString(string));
215  DCHECK(!IsThinString(string));
217 }
void AddString(Tagged< String > string)
Definition: heap-inl.h:361
constexpr bool IsThinString(InstanceType instance_type)

References v8::internal::Heap::ExternalStringTable::AddString(), v8::internal::DCHECK(), external_string_table_, v8::internal::InstanceTypeChecker::IsExternalString(), and v8::internal::InstanceTypeChecker::IsThinString().

Referenced by v8::internal::Factory::InternalizeExternalString(), v8::internal::String::MakeExternal(), v8::internal::String::MakeExternalDuringGC(), v8::internal::Factory::NewExternalStringFromOneByte(), v8::internal::Factory::NewExternalStringFromTwoByte(), and v8::internal::anonymous_namespace{deserializer.cc}::PostProcessExternalString().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RegisterStrongRoots()

StrongRootsEntry * v8::internal::Heap::RegisterStrongRoots ( const char *  label,
FullObjectSlot  start,
FullObjectSlot  end 
)

Definition at line 6976 of file heap.cc.

6978  {
6979  // We're either on the main thread, or in a background thread with an active
6980  // local heap.
6981  DCHECK(isolate()->CurrentLocalHeap()->IsRunning());
6982 
6984 
6985  StrongRootsEntry* entry = new StrongRootsEntry(label);
6986  entry->start = start;
6987  entry->end = end;
6988  entry->prev = nullptr;
6989  entry->next = strong_roots_head_;
6990 
6991  if (strong_roots_head_) {
6993  strong_roots_head_->prev = entry;
6994  }
6995  strong_roots_head_ = entry;
6996 
6997  return entry;
6998 }
base::Mutex strong_roots_mutex_
Definition: heap.h:2378
StrongRootsEntry * prev
Definition: heap.h:171

References v8::internal::DCHECK(), DCHECK_NULL, v8::internal::compiler::end(), v8::internal::StrongRootsEntry::end, isolate(), v8::internal::StrongRootsEntry::next, v8::internal::StrongRootsEntry::prev, v8::internal::StrongRootsEntry::start, strong_roots_head_, and strong_roots_mutex_.

Referenced by v8::internal::StrongRootAllocatorBase::allocate_impl(), and v8::internal::IdentityMapBase::InsertEntry().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RememberUnmappedPage()

void v8::internal::Heap::RememberUnmappedPage ( Address  page,
bool  compacted 
)

Definition at line 6947 of file heap.cc.

6947  {
6948  // Tag the page pointer to make it findable in the dump file.
6949  if (compacted) {
6950  page ^= 0xC1EAD & (PageMetadata::kPageSize - 1); // Cleared.
6951  } else {
6952  page ^= 0x1D1ED & (PageMetadata::kPageSize - 1); // I died.
6953  }
6957 }
Address remembered_unmapped_pages_[kRememberedUnmappedPages]
Definition: heap.h:2286
static const int kRememberedUnmappedPages
Definition: heap.h:1762
int remembered_unmapped_pages_index_
Definition: heap.h:2285

References v8::internal::MutablePageMetadata::kPageSize, kRememberedUnmappedPages, remembered_unmapped_pages_, and remembered_unmapped_pages_index_.

Referenced by Heap(), and v8::internal::MemoryAllocator::PreFreeMemory().

+ Here is the caller graph for this function:

◆ RemoveAllocationObserversFromAllSpaces()

void v8::internal::Heap::RemoveAllocationObserversFromAllSpaces ( AllocationObserver observer,
AllocationObserver new_space_observer 
)

Definition at line 1034 of file heap.cc.

1035  {
1036  DCHECK(observer && new_space_observer);
1037  allocator()->RemoveAllocationObserver(observer, new_space_observer);
1038 }
void RemoveAllocationObserver(AllocationObserver *observer, AllocationObserver *new_space_observer)

References allocator(), v8::internal::DCHECK(), and v8::internal::HeapAllocator::RemoveAllocationObserver().

Referenced by v8::internal::StressConcurrentAllocationObserver::Step(), TearDown(), and v8::internal::SamplingHeapProfiler::~SamplingHeapProfiler().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RemoveDirtyFinalizationRegistriesOnContext()

void v8::internal::Heap::RemoveDirtyFinalizationRegistriesOnContext ( Tagged< NativeContext context)

Definition at line 7097 of file heap.cc.

7098  {
7100 
7101  Isolate* isolate = this->isolate();
7102  Tagged<Object> prev = ReadOnlyRoots(isolate).undefined_value();
7104  while (!IsUndefined(current, isolate)) {
7105  Tagged<JSFinalizationRegistry> finalization_registry =
7106  Cast<JSFinalizationRegistry>(current);
7107  if (finalization_registry->native_context() == context) {
7108  if (IsUndefined(prev, isolate)) {
7110  finalization_registry->next_dirty());
7111  } else {
7112  Cast<JSFinalizationRegistry>(prev)->set_next_dirty(
7113  finalization_registry->next_dirty());
7114  }
7115  finalization_registry->set_scheduled_for_cleanup(false);
7116  current = finalization_registry->next_dirty();
7117  finalization_registry->set_next_dirty(
7118  ReadOnlyRoots(isolate).undefined_value());
7119  } else {
7120  prev = current;
7121  current = finalization_registry->next_dirty();
7122  }
7123  }
7125 }

References dirty_js_finalization_registries_list(), isolate(), v8::internal::anonymous_namespace{json-stringifier.cc}::no_gc, ReadOnlyRoots, set_dirty_js_finalization_registries_list(), and set_dirty_js_finalization_registries_list_tail().

Referenced by NotifyContextDisposed().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RemoveGCEpilogueCallback()

void v8::internal::Heap::RemoveGCEpilogueCallback ( v8::Isolate::GCCallbackWithData  callback,
void *  data 
)

Definition at line 6452 of file heap.cc.

6453  {
6454  gc_epilogue_callbacks_.Remove(callback, data);
6455 }
void Remove(CallbackType callback, void *data)
Definition: gc-callbacks.h:31

References gc_epilogue_callbacks_, and v8::internal::GCCallbacks::Remove().

Referenced by v8::Isolate::RemoveGCEpilogueCallback().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RemoveGCPrologueCallback()

void v8::internal::Heap::RemoveGCPrologueCallback ( v8::Isolate::GCCallbackWithData  callback,
void *  data 
)

Definition at line 6441 of file heap.cc.

6442  {
6443  gc_prologue_callbacks_.Remove(callback, data);
6444 }

References gc_prologue_callbacks_, and v8::internal::GCCallbacks::Remove().

Referenced by v8::Isolate::RemoveGCPrologueCallback().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RemoveHeapObjectAllocationTracker()

void v8::internal::Heap::RemoveHeapObjectAllocationTracker ( HeapObjectAllocationTracker tracker)

Definition at line 931 of file heap.cc.

932  {
933  allocation_trackers_.erase(std::remove(allocation_trackers_.begin(),
934  allocation_trackers_.end(), tracker),
935  allocation_trackers_.end());
936  if (allocation_trackers_.empty()) {
938  }
939  if (allocation_trackers_.empty() && v8_flags.inline_new) {
941  }
942 }
V8_EXPORT_PRIVATE void EnableInlineAllocation()
Definition: heap.cc:5736

References allocation_trackers_, EnableInlineAllocation(), isolate_, v8::internal::Isolate::UpdateLogObjectRelocation(), and v8::internal::v8_flags.

Referenced by v8::internal::HeapProfiler::StopHeapObjectsTracking(), v8::internal::Debug::StopSideEffectCheckMode(), and v8::internal::Heap::AllocationTrackerForDebugging::~AllocationTrackerForDebugging().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RemoveNearHeapLimitCallback()

void v8::internal::Heap::RemoveNearHeapLimitCallback ( v8::NearHeapLimitCallback  callback,
size_t  heap_limit 
)

Definition at line 4370 of file heap.cc.

4371  {
4372  for (size_t i = 0; i < near_heap_limit_callbacks_.size(); i++) {
4373  if (near_heap_limit_callbacks_[i].first == callback) {
4375  if (heap_limit) {
4376  RestoreHeapLimit(heap_limit);
4377  }
4378  return;
4379  }
4380  }
4381  UNREACHABLE();
4382 }
void RestoreHeapLimit(size_t heap_limit)
Definition: heap.h:707

References near_heap_limit_callbacks_, RestoreHeapLimit(), and v8::internal::UNREACHABLE().

Referenced by v8::Isolate::RemoveNearHeapLimitCallback().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ReplaceReadOnlySpace()

void v8::internal::Heap::ReplaceReadOnlySpace ( SharedReadOnlySpace shared_ro_space)

Definition at line 5860 of file heap.cc.

5860  {
5861  if (read_only_space_) {
5863  delete read_only_space_;
5864  }
5865 
5868 }
void SetReadOnlySpace(ReadOnlySpace *)
virtual V8_EXPORT_PRIVATE void TearDown(MemoryAllocator *memory_allocator)

References heap_allocator_, memory_allocator(), read_only_space_, v8::internal::HeapAllocator::SetReadOnlySpace(), space(), and v8::internal::ReadOnlySpace::TearDown().

Referenced by v8::internal::ReadOnlyArtifacts::ReinstallReadOnlySpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ReportStatisticsAfterGC()

void v8::internal::Heap::ReportStatisticsAfterGC ( )
private

Definition at line 812 of file heap.cc.

812  {
813  if (deferred_counters_.empty()) return;
814  // Move the contents into a new SmallVector first, in case
815  // {Isolate::CountUsage} puts the counters into {deferred_counters_} again.
816  decltype(deferred_counters_) to_report = std::move(deferred_counters_);
818  isolate()->CountUsage(base::VectorOf(to_report));
819 }
constexpr Vector< T > VectorOf(T *start, size_t size)
Definition: vector.h:359

References v8::internal::Isolate::CountUsage(), v8::internal::DCHECK(), deferred_counters_, v8::base::SmallVector< T, kSize, Allocator >::empty(), isolate(), and v8::base::VectorOf().

Referenced by GarbageCollectionEpilogue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ResetAllAllocationSitesDependentCode()

void v8::internal::Heap::ResetAllAllocationSitesDependentCode ( AllocationType  allocation)
private

Definition at line 3076 of file heap.cc.

3076  {
3077  DisallowGarbageCollection no_gc_scope;
3078  bool marked = false;
3079 
3082  [&marked, allocation, this](Tagged<AllocationSite> site) {
3083  if (site->GetAllocationType() == allocation) {
3084  site->ResetPretenureDecision();
3085  site->set_deopt_dependent_code(true);
3086  marked = true;
3087  pretenuring_handler_.RemoveAllocationSitePretenuringFeedback(site);
3088  return;
3089  }
3090  });
3091  if (marked) isolate_->stack_guard()->RequestDeoptMarkedAllocationSites();
3092 }

References allocation_sites_list(), ForeachAllocationSite(), isolate_, and v8::internal::Isolate::stack_guard().

Referenced by EvaluateOldSpaceLocalPretenuring().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ResetOldGenerationAndGlobalAllocationLimit()

void v8::internal::Heap::ResetOldGenerationAndGlobalAllocationLimit ( )
private

Definition at line 1586 of file heap.cc.

References configured_, DCHECK_IMPLIES, v8::internal::anonymous_namespace{heap.cc}::GlobalMemorySizeFromV8Size(), initial_old_generation_size_, initial_size_overwritten_, set_using_initial_limit(), and SetOldGenerationAndGlobalAllocationLimit().

Referenced by ConfigureHeap(), and NotifyContextDisposed().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ResizeArrayBufferExtension()

void v8::internal::Heap::ResizeArrayBufferExtension ( ArrayBufferExtension extension,
int64_t  delta 
)

Definition at line 4389 of file heap.cc.

4390  {
4391  // ArrayBufferSweeper is managing all counters and updating Heap counters.
4392  array_buffer_sweeper_->Resize(extension, delta);
4393 }

References array_buffer_sweeper_.

Referenced by v8::internal::ResizeHelper().

+ Here is the caller graph for this function:

◆ ResizeNewSpace()

void v8::internal::Heap::ResizeNewSpace ( )
private

Definition at line 3987 of file heap.cc.

3987  {
3988  DCHECK_IMPLIES(!v8_flags.minor_ms,
3990  const ResizeNewSpaceMode mode =
3993 
3994  switch (mode) {
3997  break;
4000  break;
4002  break;
4003  }
4004 }
ResizeNewSpaceMode ShouldResizeNewSpace()
Definition: heap.cc:3943

References DCHECK_IMPLIES, ExpandNewSpaceSize(), kGrow, kNone, kShrink, mode(), ReduceNewSpaceSize(), resize_new_space_mode_, ShouldResizeNewSpace(), and v8::internal::v8_flags.

Referenced by v8::internal::MarkCompactCollector::Finish(), and v8::internal::MinorMarkSweepCollector::Finish().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RestoreHeapLimit()

void v8::internal::Heap::RestoreHeapLimit ( size_t  heap_limit)
inline

Definition at line 707 of file heap.h.

707  {
708  // Do not set the limit lower than the live size + some slack.
709  size_t min_limit = SizeOfObjects() + SizeOfObjects() / 4;
711  std::min(max_old_generation_size(), std::max(heap_limit, min_limit)));
712  }

Referenced by RemoveNearHeapLimitCallback().

+ Here is the caller graph for this function:

◆ ResumeConcurrentThreadsInClients()

void v8::internal::Heap::ResumeConcurrentThreadsInClients ( std::vector< Isolate * >  paused_clients)
private

Definition at line 2473 of file heap.cc.

2474  {
2475  if (isolate()->is_shared_space_isolate()) {
2476  for (Isolate* client : paused_clients) {
2477  client->heap()->concurrent_marking()->Resume();
2478  }
2479  } else {
2480  DCHECK(paused_clients.empty());
2481  }
2482 }

References v8::internal::DCHECK(), and isolate().

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RightTrimArray()

template<typename Array >
void v8::internal::Heap::RightTrimArray ( Tagged< Array object,
int  new_capacity,
int  old_capacity 
)

Definition at line 3592 of file heap.cc.

3593  {
3594  DCHECK_EQ(old_capacity, object->capacity());
3595  DCHECK_LT(new_capacity, old_capacity);
3596  DCHECK_GE(new_capacity, 0);
3597 
3598  if constexpr (Array::kElementsAreMaybeObject) {
3599  // For MaybeObject elements, this function is safe to use only at the end
3600  // of the mark compact collection: When marking, we record the weak slots,
3601  // and shrinking invalidates them.
3603  }
3604 
3605  const int bytes_to_trim = (old_capacity - new_capacity) * Array::kElementSize;
3606 
3607  // Calculate location of new array end.
3608  const int old_size = Array::SizeFor(old_capacity);
3609  DCHECK_EQ(object->AllocatedSize(), old_size);
3610  Address old_end = object.address() + old_size;
3611  Address new_end = old_end - bytes_to_trim;
3612 
3613  const bool clear_slots = MayContainRecordedSlots(object);
3614 
3615  // Technically in new space this write might be omitted (except for debug
3616  // mode which iterates through the heap), but to play safer we still do it.
3617  // We do not create a filler for objects in a large object space.
3618  if (!IsLargeObject(object)) {
3620  object, old_size, old_size - bytes_to_trim,
3622  if (!v8_flags.black_allocated_pages) {
3623  Tagged<HeapObject> filler = HeapObject::FromAddress(new_end);
3624  // Clear the mark bits of the black area that belongs now to the filler.
3625  // This is an optimization. The sweeper will release black fillers anyway.
3626  if (incremental_marking()->black_allocation() &&
3627  marking_state()->IsMarked(filler)) {
3628  PageMetadata* page = PageMetadata::FromAddress(new_end);
3629  page->marking_bitmap()->ClearRange<AccessMode::ATOMIC>(
3631  MarkingBitmap::LimitAddressToIndex(new_end + bytes_to_trim));
3632  }
3633  }
3634  } else if (clear_slots) {
3635  // Large objects are not swept, so it is not necessary to clear the
3636  // recorded slot.
3638  (old_end - new_end) / kTaggedSize);
3639  }
3640 
3641  // Initialize header of the trimmed array. We are storing the new capacity
3642  // using release store after creating a filler for the left-over space to
3643  // avoid races with the sweeper thread.
3644  object->set_capacity(new_capacity, kReleaseStore);
3645 
3646  // Notify the heap object allocation tracker of change in object layout. The
3647  // array may not be moved during GC, and size has to be adjusted nevertheless.
3648  for (auto& tracker : allocation_trackers_) {
3649  tracker->UpdateObjectSizeEvent(object.address(),
3650  Array::SizeFor(new_capacity));
3651  }
3652 }
MarkingState * marking_state()
Definition: heap.h:1677
void NotifyObjectSizeChange(Tagged< HeapObject >, int old_size, int new_size, ClearRecordedSlots clear_recorded_slots)
Definition: heap.cc:4218
static constexpr MarkBitIndex LimitAddressToIndex(Address address)
Definition: marking-inl.h:180
static constexpr MarkBitIndex AddressToIndex(Address address)
Definition: marking-inl.h:174
static PageMetadata * FromAddress(Address addr)
SlotTraits::TObjectSlot ObjectSlot
Definition: globals.h:1237
void MemsetTagged(Tagged_t *start, Tagged< MaybeObject > value, size_t counter)
Definition: slots-inl.h:497
constexpr uint32_t kClearedFreeMemoryValue
Definition: globals.h:998
static constexpr ReleaseStoreTag kReleaseStore
Definition: globals.h:2944

References v8::internal::MarkingBitmap::AddressToIndex(), allocation_trackers_, v8::internal::ATOMIC, v8::internal::MarkingBitmap::ClearRange(), DCHECK_EQ, DCHECK_GE, DCHECK_LT, v8::internal::PageMetadata::FromAddress(), v8::internal::HeapObject::FromAddress(), gc_state(), incremental_marking(), IsLargeObject(), v8::internal::kClearedFreeMemoryValue, v8::internal::kNo, v8::kReleaseStore, v8::internal::kTaggedSize, v8::internal::kYes, v8::internal::MarkingBitmap::LimitAddressToIndex(), MARK_COMPACT, v8::internal::MutablePageMetadata::marking_bitmap(), marking_state(), v8::internal::anonymous_namespace{heap.cc}::MayContainRecordedSlots(), v8::internal::MemsetTagged(), NotifyObjectSizeChange(), and v8::internal::v8_flags.

Referenced by v8::internal::anonymous_namespace{elements.cc}::ElementsAccessorBase< Subclass, ElementsTraitsParam >::DecreaseLength(), v8::internal::anonymous_namespace{elements.cc}::FastElementsAccessor< Subclass, KindTraits >::DeleteAtEnd(), and v8::internal::TaggedArrayBase< Derived, ShapeT, Super >::RightTrim().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ roots_table()

RootsTable & v8::internal::Heap::roots_table ( )
inline

Definition at line 69 of file heap-inl.h.

69 { return isolate()->roots_table(); }
RootsTable & roots_table()
Definition: isolate.h:1265

References isolate(), and v8::internal::Isolate::roots_table().

Referenced by CreateEarlyReadOnlyMapsAndObjects(), CreateImportantReadOnlyObjects(), CreateLateReadOnlyNonJSReceiverMaps(), CreateReadOnlyObjects(), IterateRoots(), IterateSmiRoots(), NextScriptId(), SetFunctionsMarkedForManualOptimization(), SetMessageListeners(), SetRootMaterializedObjects(), and SetRootScriptList().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ safepoint()

◆ Scavenge()

void v8::internal::Heap::Scavenge ( )
private

Definition at line 2800 of file heap.cc.

2800  {
2802  DCHECK(!incremental_marking()->IsMarking());
2803 
2804  TRACE_GC(tracer(), GCTracer::Scope::SCAVENGER_SCAVENGE);
2806 
2807  // Implements Cheney's copying algorithm
2808  scavenger_collector_->CollectGarbage();
2809 
2811 }

References v8::internal::DCHECK(), DCHECK_NOT_NULL, incremental_marking(), new_space(), NOT_IN_GC, SCAVENGE, scavenger_collector_, SetGCState(), TRACE_GC, and tracer().

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SelectGarbageCollector()

GarbageCollector v8::internal::Heap::SelectGarbageCollector ( AllocationSpace  space,
GarbageCollectionReason  gc_reason,
const char **  reason 
) const
private

Definition at line 473 of file heap.cc.

475  {
477  DCHECK_NE(static_cast<bool>(new_space()),
478  v8_flags.sticky_mark_bits.value());
480  *reason = "MinorMS finalization for starting major GC";
482  }
483 
485  DCHECK_NE(static_cast<bool>(new_space()),
486  v8_flags.sticky_mark_bits.value());
488  *reason = "Concurrent MinorMS needs finalization";
490  }
491  // Is global GC requested?
492  if (space != NEW_SPACE && space != NEW_LO_SPACE) {
493  isolate_->counters()->gc_compactor_caused_by_request()->Increment();
494  *reason = "GC in old space requested";
496  }
497 
498  if (v8_flags.gc_global || ShouldStressCompaction() || !use_new_space()) {
499  *reason = "GC in old space forced by flags";
501  }
502 
503  if (incremental_marking()->IsMajorMarking()) {
504  *reason = "Incremental marking forced finalization";
506  }
507 
509  isolate_->counters()
510  ->gc_compactor_caused_by_oldspace_exhaustion()
511  ->Increment();
512  *reason = "scavenge might not succeed";
514  }
515 
516  DCHECK(!v8_flags.single_generation);
517  DCHECK(!v8_flags.gc_global);
518  // Default
519  *reason = nullptr;
520  return YoungGenerationCollector();
521 }
static GarbageCollector YoungGenerationCollector()
Definition: heap.h:368
V8_EXPORT_PRIVATE bool CanPromoteYoungAndExpandOldGeneration(size_t size) const
Definition: heap.cc:426

References CanPromoteYoungAndExpandOldGeneration(), v8::internal::Isolate::counters(), v8::internal::DCHECK(), DCHECK_NE, incremental_marking(), isolate_, v8::internal::kFinalizeConcurrentMinorMS, v8::internal::kFinalizeMinorMSForMajorGC, v8::internal::MARK_COMPACTOR, v8::internal::MINOR_MARK_SWEEPER, v8::internal::NEW_LO_SPACE, v8::internal::NEW_SPACE, new_space(), ShouldReduceMemory(), ShouldStressCompaction(), space(), use_new_space(), v8::internal::v8_flags, and YoungGenerationCollector().

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ semi_space_new_space()

SemiSpaceNewSpace * v8::internal::Heap::semi_space_new_space ( ) const
inline

Definition at line 430 of file heap-inl.h.

430  {
432 }
static SemiSpaceNewSpace * From(NewSpace *space)
Definition: new-spaces.h:259

References v8::internal::SemiSpaceNewSpace::From(), and new_space().

Referenced by CanPromoteYoungAndExpandOldGeneration(), v8::internal::SemiSpace::MoveQuarantinedPage(), v8::internal::GCTracer::PrintNVP(), ReduceNewSpaceSize(), YoungGenerationConsumedBytes(), and v8::internal::anonymous_namespace{scavenger.cc}::YoungGenerationConservativeStackVisitor::~YoungGenerationConservativeStackVisitor().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SemiSpaceSizeFromYoungGenerationSize()

size_t v8::internal::Heap::SemiSpaceSizeFromYoungGenerationSize ( size_t  young_generation_size)
static

Definition at line 320 of file heap.cc.

321  {
322  return young_generation_size /
324 }
static constexpr size_t kNewLargeObjectSpaceToSemiSpaceRatio
Definition: heap.h:320

References kNewLargeObjectSpaceToSemiSpaceRatio, and v8::internal::anonymous_namespace{heap.cc}::NumberOfSemiSpaces().

Referenced by ConfigureHeap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ set_allocation_sites_list()

void v8::internal::Heap::set_allocation_sites_list ( Tagged< UnionOf< Smi, Undefined, AllocationSiteWithWeakNext >>  object)
inline

Definition at line 509 of file heap.h.

510  {
511  allocation_sites_list_ = object;
512  }

Referenced by CreateMutableHeapObjects(), v8::internal::StartupDeserializer::DeserializeIntoIsolate(), v8::internal::Factory::NewAllocationSite(), ProcessAllocationSites(), and ProcessWeakListRoots().

+ Here is the caller graph for this function:

◆ set_dirty_js_finalization_registries_list()

void v8::internal::Heap::set_dirty_js_finalization_registries_list ( Tagged< Object > object)
inline

Definition at line 518 of file heap.h.

518  {
520  }

Referenced by CreateMutableHeapObjects(), DequeueDirtyJSFinalizationRegistry(), v8::internal::StartupDeserializer::DeserializeIntoIsolate(), EnqueueDirtyJSFinalizationRegistry(), ProcessDirtyJSFinalizationRegistries(), ProcessWeakListRoots(), and RemoveDirtyFinalizationRegistriesOnContext().

+ Here is the caller graph for this function:

◆ set_dirty_js_finalization_registries_list_tail()

void v8::internal::Heap::set_dirty_js_finalization_registries_list_tail ( Tagged< Object > object)
inline

Definition at line 524 of file heap.h.

524  {
526  }

Referenced by CreateMutableHeapObjects(), DequeueDirtyJSFinalizationRegistry(), v8::internal::StartupDeserializer::DeserializeIntoIsolate(), EnqueueDirtyJSFinalizationRegistry(), ProcessDirtyJSFinalizationRegistries(), ProcessWeakListRoots(), and RemoveDirtyFinalizationRegistriesOnContext().

+ Here is the caller graph for this function:

◆ set_force_gc_on_next_allocation()

void v8::internal::Heap::set_force_gc_on_next_allocation ( )
inline private

Definition at line 2102 of file heap.h.

2102  {
2104  }

◆ set_force_oom()

void v8::internal::Heap::set_force_oom ( bool  value)
inline private

Definition at line 2101 of file heap.h.

2101 { force_oom_ = value; }

References v8::internal::value.

◆ set_is_finalization_registry_cleanup_task_posted()

void v8::internal::Heap::set_is_finalization_registry_cleanup_task_posted ( bool  posted)
inline

Definition at line 945 of file heap.h.

945  {
947  }

Referenced by v8::internal::FinalizationRegistryCleanupTask::RunInternal().

+ Here is the caller graph for this function:

◆ set_native_contexts_list()

void v8::internal::Heap::set_native_contexts_list ( Tagged< Object > object)
inline

Definition at line 500 of file heap.h.

500  {
501  native_contexts_list_.store(object.ptr(), std::memory_order_release);
502  }

Referenced by CreateMutableHeapObjects(), v8::internal::StartupDeserializer::DeserializeIntoIsolate(), Heap(), ProcessNativeContexts(), and ProcessWeakListRoots().

+ Here is the caller graph for this function:

◆ set_old_generation_allocation_counter_at_last_gc()

void v8::internal::Heap::set_old_generation_allocation_counter_at_last_gc ( size_t  new_value)
inline

Definition at line 1406 of file heap.h.

1406  {
1408  }

◆ set_using_initial_limit()

void v8::internal::Heap::set_using_initial_limit ( bool  value)
inline private

Definition at line 1995 of file heap.h.

1995  {
1996  using_initial_limit_.store(value, std::memory_order_relaxed);
1997  }
std::atomic< bool > using_initial_limit_
Definition: heap.h:2170

References v8::internal::value.

Referenced by EnsureMinimumRemainingAllocationLimit(), MarkCompact(), RecomputeLimitsAfterLoadingIfNeeded(), and ResetOldGenerationAndGlobalAllocationLimit().

+ Here is the caller graph for this function:

◆ SetBasicBlockProfilingData()

void v8::internal::Heap::SetBasicBlockProfilingData ( DirectHandle< ArrayList > list)

Definition at line 173 of file heap.cc.

173  {
174  set_basic_block_profiling_data(*list);
175 }

◆ SetBuiltinsConstantsTable()

void v8::internal::Heap::SetBuiltinsConstantsTable ( Tagged< FixedArray > cache)

Definition at line 7027 of file heap.cc.

7027  {
7028  set_builtins_constants_table(cache);
7029 }

Referenced by v8::internal::BuiltinsConstantsTableBuilder::Finalize().

+ Here is the caller graph for this function:

◆ SetConstructStubCreateDeoptPCOffset()

void v8::internal::Heap::SetConstructStubCreateDeoptPCOffset ( int  pc_offset)

Definition at line 142 of file heap.cc.

142  {
143  DCHECK_EQ(Smi::zero(), construct_stub_create_deopt_pc_offset());
144  set_construct_stub_create_deopt_pc_offset(Smi::FromInt(pc_offset));
145 }

References DCHECK_EQ, v8::internal::Smi::FromInt(), and v8::internal::Smi::zero().

+ Here is the call graph for this function:

◆ SetConstructStubInvokeDeoptPCOffset()

void v8::internal::Heap::SetConstructStubInvokeDeoptPCOffset ( int  pc_offset)

Definition at line 147 of file heap.cc.

147  {
148  DCHECK_EQ(Smi::zero(), construct_stub_invoke_deopt_pc_offset());
149  set_construct_stub_invoke_deopt_pc_offset(Smi::FromInt(pc_offset));
150 }

References DCHECK_EQ, v8::internal::Smi::FromInt(), and v8::internal::Smi::zero().

+ Here is the call graph for this function:

◆ SetDeoptPCOffsetAfterAdaptShadowStack()

void v8::internal::Heap::SetDeoptPCOffsetAfterAdaptShadowStack ( int  pc_offset)

Definition at line 152 of file heap.cc.

152  {
153  DCHECK((Smi::zero() == deopt_pc_offset_after_adapt_shadow_stack()) ||
154  (pc_offset == deopt_pc_offset_after_adapt_shadow_stack().value()));
155  set_deopt_pc_offset_after_adapt_shadow_stack(Smi::FromInt(pc_offset));
156 }

References v8::internal::DCHECK(), v8::internal::Smi::FromInt(), v8::internal::value, and v8::internal::Smi::zero().

+ Here is the call graph for this function:

◆ SetDetachedContexts()

void v8::internal::Heap::SetDetachedContexts ( Tagged< WeakArrayList > detached_contexts)

Definition at line 7031 of file heap.cc.

7031  {
7032  set_detached_contexts(detached_contexts);
7033 }

Referenced by v8::internal::anonymous_namespace{startup-serializer.cc}::SanitizeIsolateScope::SanitizeIsolateScope().

+ Here is the caller graph for this function:

◆ SetDoubleStringCache()

void v8::internal::Heap::SetDoubleStringCache ( Tagged< DoubleStringCache > cache)
inline

Definition at line 139 of file heap-inl.h.

139  {
140  set_double_string_cache(cache);
141 }

Referenced by v8::internal::DoubleStringCache::Set().

+ Here is the caller graph for this function:

◆ SetEmbedderRootsHandler()

void v8::internal::Heap::SetEmbedderRootsHandler ( EmbedderRootsHandler handler)

Definition at line 6226 of file heap.cc.

6226  {
6227  embedder_roots_handler_ = handler;
6228 }

References embedder_roots_handler_.

Referenced by v8::Isolate::SetEmbedderRootsHandler().

+ Here is the caller graph for this function:

◆ SetFunctionsMarkedForManualOptimization()

void v8::internal::Heap::SetFunctionsMarkedForManualOptimization ( Tagged< Object > bytecode)
inline

Definition at line 129 of file heap-inl.h.

129  {
130  DCHECK(IsObjectHashTable(hash_table) || IsUndefined(hash_table, isolate()));
131  roots_table()[RootIndex::kFunctionsMarkedForManualOptimization] =
132  hash_table.ptr();
133 }

References v8::internal::DCHECK(), isolate(), v8::internal::TaggedImpl< kRefType, StorageType >::ptr(), and roots_table().

Referenced by v8::internal::Snapshot::ClearReconstructableDataForSerialization(), and v8::internal::ManualOptimizationTable::MarkFunctionForManualOptimization().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetGCState()

void v8::internal::Heap::SetGCState ( HeapState  state)

Definition at line 523 of file heap.cc.

523  {
524  gc_state_.store(state, std::memory_order_relaxed);
525 }

References gc_state_.

Referenced by MarkCompact(), MarkCompactEpilogue(), MinorMarkSweep(), Scavenge(), and StartTearDown().

+ Here is the caller graph for this function:

◆ SetGetExternallyAllocatedMemoryInBytesCallback()

void v8::internal::Heap::SetGetExternallyAllocatedMemoryInBytesCallback ( GetExternallyAllocatedMemoryInBytesCallback  callback)
inline

Definition at line 1019 of file heap.h.

1020  {
1021  external_memory_callback_ = callback;
1022  }

Referenced by v8::Isolate::SetGetExternallyAllocatedMemoryInBytesCallback(), and SetUpSpaces().

+ Here is the caller graph for this function:

◆ SetInterpreterEntryReturnPCOffset()

void v8::internal::Heap::SetInterpreterEntryReturnPCOffset ( int  pc_offset)

Definition at line 158 of file heap.cc.

158  {
159  DCHECK_EQ(Smi::zero(), interpreter_entry_return_pc_offset());
160  set_interpreter_entry_return_pc_offset(Smi::FromInt(pc_offset));
161 }

References DCHECK_EQ, v8::internal::Smi::FromInt(), and v8::internal::Smi::zero().

+ Here is the call graph for this function:

◆ SetIsMarkingFlag()

void v8::internal::Heap::SetIsMarkingFlag ( bool  value)
private

Definition at line 7389 of file heap.cc.

7389  {
7391 }

References v8::internal::IsolateData::is_marking_flag_, isolate(), v8::internal::Isolate::isolate_data(), and v8::internal::value.

Referenced by v8::internal::Isolate::Init(), v8::internal::IncrementalMarking::StartMarkingMajor(), v8::internal::IncrementalMarking::StartMarkingMinor(), and v8::internal::IncrementalMarking::Stop().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetIsMinorMarkingFlag()

void v8::internal::Heap::SetIsMinorMarkingFlag ( bool  value)
private

Definition at line 7397 of file heap.cc.

7397  {
7399 }

References v8::internal::IsolateData::is_minor_marking_flag_, isolate(), v8::internal::Isolate::isolate_data(), and v8::internal::value.

Referenced by v8::internal::IncrementalMarking::StartMarkingMinor(), and v8::internal::IncrementalMarking::Stop().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetMessageListeners()

void v8::internal::Heap::SetMessageListeners ( Tagged< ArrayList > value)
inline

Definition at line 125 of file heap-inl.h.

125  {
126  roots_table()[RootIndex::kMessageListeners] = value.ptr();
127 }

References roots_table(), and v8::internal::value.

Referenced by v8::Isolate::AddMessageListenerWithErrorLevel().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetNewSpaceAllocationCounterForTesting()

void v8::internal::Heap::SetNewSpaceAllocationCounterForTesting ( size_t  new_value)
inline

Definition at line 1389 of file heap.h.

1389  {
1390  new_space_allocation_counter_ = new_value;
1391  }

◆ SetOldGenerationAndGlobalAllocationLimit()

void v8::internal::Heap::SetOldGenerationAndGlobalAllocationLimit ( size_t  new_old_generation_allocation_limit,
size_t  new_global_allocation_limit,
const char *  reason = __builtin_FUNCTION() 
)
private

Definition at line 1558 of file heap.cc.

1560  {
1561  CHECK_GE(new_global_allocation_limit, new_old_generation_allocation_limit);
1562 #if defined(V8_USE_PERFETTO)
1563  TRACE_COUNTER(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
1564  perfetto::CounterTrack(V8HeapTrait::kName,
1565  perfetto::ThreadTrack::Current()),
1566  new_old_generation_allocation_limit);
1567  TRACE_COUNTER(TRACE_DISABLED_BY_DEFAULT("v8.gc"),
1568  perfetto::CounterTrack(GlobalMemoryTrait::kName,
1569  perfetto::ThreadTrack::Current()),
1570  new_global_allocation_limit);
1571 #endif
1572  old_generation_allocation_limit_.store(new_old_generation_allocation_limit,
1573  std::memory_order_relaxed);
1574  global_allocation_limit_.store(new_global_allocation_limit,
1575  std::memory_order_relaxed);
1576  if (v8_flags.trace_gc_nvp) [[unlikely]] {
1577  isolate()->PrintWithTimestamp(
1578  "action=SetOldGenerationAndGlobalAllocationLimit "
1579  "old_generation_allocation_limit=%zu global_allocation_limit=%zu "
1580  "reason=%s\n",
1581  new_old_generation_allocation_limit, new_global_allocation_limit,
1582  reason);
1583  }
1584 }
static constexpr char kName[]
static constexpr char kName[]
#define TRACE_DISABLED_BY_DEFAULT(name)

References CHECK_GE, global_allocation_limit_, isolate(), v8::internal::V8HeapTrait::kName, v8::internal::GlobalMemoryTrait::kName, old_generation_allocation_limit_, TRACE_DISABLED_BY_DEFAULT, and v8::internal::v8_flags.

Referenced by EnsureMinimumRemainingAllocationLimit(), EnsureSweepingCompleted(), RecomputeLimits(), RecomputeLimitsAfterLoadingIfNeeded(), v8::internal::MemoryBalancer::RefreshLimit(), ResetOldGenerationAndGlobalAllocationLimit(), and ShrinkOldGenerationAllocationLimitIfNotConfigured().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetOldGenerationAndGlobalMaximumSize()

void v8::internal::Heap::SetOldGenerationAndGlobalMaximumSize ( size_t  max_old_generation_size)
private

Definition at line 1551 of file heap.cc.

1552  {
1554  std::memory_order_relaxed);
1556 }

References v8::internal::anonymous_namespace{heap.cc}::GlobalMemorySizeFromV8Size(), max_global_memory_size_, max_old_generation_size(), and max_old_generation_size_.

Referenced by CollectGarbage(), ConfigureHeap(), and InvokeNearHeapLimitCallback().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetRootMaterializedObjects()

void v8::internal::Heap::SetRootMaterializedObjects ( Tagged< FixedArray > objects)
inline

Definition at line 117 of file heap-inl.h.

117  {
118  roots_table()[RootIndex::kMaterializedObjects] = objects.ptr();
119 }

References v8::internal::TaggedImpl< kRefType, StorageType >::ptr(), and roots_table().

Referenced by v8::internal::MaterializedObjectStore::EnsureStackEntries().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetRootNoScriptSharedFunctionInfos()

void v8::internal::Heap::SetRootNoScriptSharedFunctionInfos ( Tagged< Object > value)
inline

◆ SetRootScriptList()

void v8::internal::Heap::SetRootScriptList ( Tagged< Object > value)
inline

Definition at line 121 of file heap-inl.h.

121  {
122  roots_table()[RootIndex::kScriptList] = value.ptr();
123 }

References roots_table(), and v8::internal::value.

Referenced by v8::internal::ObjectDeserializer::CommitPostProcessedObjects(), v8::internal::BackgroundCompileTask::FinalizeScript(), and v8::internal::CodeSerializer::FinishOffThreadDeserialize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetSerializedGlobalProxySizes()

void v8::internal::Heap::SetSerializedGlobalProxySizes ( Tagged< FixedArray > sizes)

Definition at line 168 of file heap.cc.

168  {
169  DCHECK(isolate()->serializer_enabled());
170  set_serialized_global_proxy_sizes(sizes);
171 }

References v8::internal::DCHECK(), and isolate().

Referenced by v8::internal::SnapshotCreatorImpl::CreateBlob().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetSerializedObjects()

void v8::internal::Heap::SetSerializedObjects ( Tagged< HeapObject > objects)

Definition at line 163 of file heap.cc.

163  {
164  DCHECK(isolate()->serializer_enabled());
165  set_serialized_objects(objects);
166 }

References v8::internal::DCHECK(), and isolate().

Referenced by v8::internal::SnapshotCreatorImpl::AddData(), and v8::internal::anonymous_namespace{snapshot.cc}::ConvertSerializedObjectsToFixedArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetSmiStringCache()

void v8::internal::Heap::SetSmiStringCache ( Tagged< SmiStringCache > cache)
inline

Definition at line 135 of file heap-inl.h.

135  {
136  set_smi_string_cache(cache);
137 }

Referenced by v8::internal::SmiStringCache::Set().

+ Here is the caller graph for this function:

◆ SetStackStart()

void v8::internal::Heap::SetStackStart ( )

Definition at line 6248 of file heap.cc.

6248  {
6249  // If no main thread local heap has been set up (we're still in the
6250  // deserialization process), we don't need to set the stack start.
6251  if (main_thread_local_heap_ == nullptr) return;
6252  stack().SetStackStart();
6253 }
void SetStackStart()
Definition: stack.h:38

References main_thread_local_heap_, heap::base::Stack::SetStackStart(), and stack().

Referenced by v8::internal::Isolate::Deinitialize(), v8::internal::Isolate::Enter(), and SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetUp()

void v8::internal::Heap::SetUp ( LocalHeap main_thread_local_heap)

Definition at line 5743 of file heap.cc.

5743  {
5749 
5750  // Set the stack start for the main thread that sets up the heap.
5751  SetStackStart();
5752 
5753 #ifdef V8_ENABLE_ALLOCATION_TIMEOUT
5754  heap_allocator_->UpdateAllocationTimeout();
5755 #endif // V8_ENABLE_ALLOCATION_TIMEOUT
5756 
5757  // Initialize heap spaces and initial maps and objects.
5758  //
5759  // If the heap is not yet configured (e.g. through the API), configure it.
5760  // Configuration is based on the flags new-space-size (really the semispace
5761  // size) and old-space-size if set or the initial values of semispace_size_
5762  // and old_generation_size_ otherwise.
5764 
5766  reinterpret_cast<uintptr_t>(v8::internal::GetRandomMmapAddr()) &
5767  ~kMmapRegionMask;
5768 
5769  v8::PageAllocator* code_page_allocator;
5770  if (isolate_->RequiresCodeRange() || code_range_size_ != 0) {
5771  const size_t requested_size =
5773  // When a target requires the code range feature, we put all code objects in
5774  // a contiguous range of virtual address space, so that they can call each
5775  // other with near calls.
5776 #ifdef V8_COMPRESS_POINTERS
5777  // When pointer compression is enabled, isolates in the same group share the
5778  // same CodeRange, owned by the IsolateGroup.
5779  code_range_ = isolate_->isolate_group()->EnsureCodeRange(requested_size);
5780 #else
5781  // Otherwise, each isolate has its own CodeRange, owned by the heap.
5782  code_range_ = std::make_unique<CodeRange>();
5783  if (!code_range_->InitReservation(isolate_->page_allocator(),
5784  requested_size, false)) {
5786  isolate_, "Failed to reserve virtual memory for CodeRange");
5787  }
5788 #endif // V8_COMPRESS_POINTERS
5789 
5790  LOG(isolate_,
5791  NewEvent("CodeRange",
5792  reinterpret_cast<void*>(code_range_->reservation()->address()),
5793  code_range_size_));
5794 
5795  isolate_->AddCodeRange(code_range_->reservation()->region().begin(),
5796  code_range_->reservation()->region().size());
5797  code_page_allocator = code_range_->page_allocator();
5798  } else {
5799  code_page_allocator = isolate_->page_allocator();
5800  }
5801 
5802  v8::PageAllocator* trusted_page_allocator;
5803 #ifdef V8_ENABLE_SANDBOX
5804  trusted_page_allocator =
5805  TrustedRange::GetProcessWideTrustedRange()->page_allocator();
5806 #else
5807  trusted_page_allocator = isolate_->page_allocator();
5808 #endif
5809 
5811  reinterpret_cast<v8::Isolate*>(isolate()));
5812 
5813  collection_barrier_.reset(new CollectionBarrier(this, this->task_runner_));
5814 
5815  // Set up memory allocator.
5816  memory_allocator_.reset(new MemoryAllocator(
5817  isolate_, code_page_allocator, trusted_page_allocator, MaxReserved()));
5818 
5819  sweeper_.reset(new Sweeper(this));
5820 
5822 
5823  scavenger_collector_.reset(new ScavengerCollector(this));
5825  ephemeron_remembered_set_.reset(new EphemeronRememberedSet());
5826 
5827  incremental_marking_.reset(
5828  new IncrementalMarking(this, mark_compact_collector_->weak_objects()));
5829 
5830  if (v8_flags.concurrent_marking || v8_flags.parallel_marking) {
5831  concurrent_marking_.reset(
5832  new ConcurrentMarking(this, mark_compact_collector_->weak_objects()));
5833  } else {
5834  concurrent_marking_.reset(new ConcurrentMarking(this, nullptr));
5835  }
5836 
5837  // Set up layout tracing callback.
5838  if (V8_UNLIKELY(v8_flags.trace_gc_heap_layout)) {
5840  if (V8_UNLIKELY(!v8_flags.trace_gc_heap_layout_ignore_minor_gc)) {
5841  gc_type = static_cast<v8::GCType>(gc_type | kGCTypeScavenge |
5843  }
5845  nullptr);
5847  nullptr);
5848  }
5849 }
A V8 memory page allocator.
Definition: v8-platform.h:455
static void GCEpiloguePrintHeapLayout(v8::Isolate *isolate, v8::GCType gc_type, v8::GCCallbackFlags flags, void *data)
static void GCProloguePrintHeapLayout(v8::Isolate *isolate, v8::GCType gc_type, v8::GCCallbackFlags flags, void *data)
void AddGCEpilogueCallback(v8::Isolate::GCCallbackWithData callback, GCType gc_type_filter, void *data)
Definition: heap.cc:6446
void ConfigureHeapDefault()
Definition: heap.cc:5320
void AddGCPrologueCallback(v8::Isolate::GCCallbackWithData callback, GCType gc_type_filter, void *data)
Definition: heap.cc:6435
friend class MinorMarkSweepCollector
Definition: heap.h:2511
friend class ScavengerCollector
Definition: heap.h:2526
friend class IncrementalMarking
Definition: heap.h:2499
friend class ConcurrentMarking
Definition: heap.h:2488
friend class MarkCompactCollector
Definition: heap.h:2507
V8_EXPORT_PRIVATE void SetStackStart()
Definition: heap.cc:6248
CodeRange * EnsureCodeRange(size_t requested_size)
IsolateGroup * isolate_group() const
Definition: isolate.h:1245
void AddCodeRange(Address begin, size_t length_in_bytes)
Definition: isolate.cc:7544
v8::PageAllocator * page_allocator() const
Definition: isolate.cc:4237
bool RequiresCodeRange() const
Definition: isolate.cc:7549
HeapAllocator heap_allocator_
Definition: local-heap.h:406
constexpr size_t kMaximalCodeRangeSize
Definition: globals.h:509
@ kGCTypeMinorMarkSweep
Definition: v8-callbacks.h:156
@ kGCTypeScavenge
Definition: v8-callbacks.h:155
@ kGCTypeMarkSweepCompact
Definition: v8-callbacks.h:157

References v8::internal::Isolate::AddCodeRange(), AddGCEpilogueCallback(), AddGCPrologueCallback(), code_range_, code_range_size_, collection_barrier_, concurrent_marking_, ConcurrentMarking, configured_, ConfigureHeapDefault(), DCHECK_NOT_NULL, DCHECK_NULL, v8::internal::IsolateGroup::EnsureCodeRange(), ephemeron_remembered_set_, v8::internal::V8::FatalProcessOutOfMemory(), v8::internal::HeapLayoutTracer::GCEpiloguePrintHeapLayout(), v8::internal::HeapLayoutTracer::GCProloguePrintHeapLayout(), v8::internal::V8::GetCurrentPlatform(), v8::Platform::GetForegroundTaskRunner(), v8::internal::GetRandomMmapAddr(), heap_allocator_, v8::internal::LocalHeap::heap_allocator_, incremental_marking_, IncrementalMarking, isolate(), isolate_, v8::internal::Isolate::isolate_group(), v8::kGCTypeMarkSweepCompact, v8::kGCTypeMinorMarkSweep, v8::kGCTypeScavenge, v8::internal::kMaximalCodeRangeSize, kMmapRegionMask, LOG, main_thread_local_heap(), main_thread_local_heap_, mark_compact_collector_, MarkCompactCollector, MaxReserved(), memory_allocator_, minor_mark_sweep_collector_, MinorMarkSweepCollector, mmap_region_base_, v8::internal::Isolate::page_allocator(), v8::internal::Isolate::RequiresCodeRange(), scavenger_collector_, ScavengerCollector, SetStackStart(), Sweeper, sweeper_, task_runner_, v8::internal::v8_flags, and V8_UNLIKELY.

Referenced by v8::internal::Isolate::Init().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetUpFromReadOnlyHeap()

void v8::internal::Heap::SetUpFromReadOnlyHeap ( ReadOnlyHeap ro_heap)

Definition at line 5851 of file heap.cc.

5851  {
5852  DCHECK_NOT_NULL(ro_heap);
5853  DCHECK_IMPLIES(read_only_space_ != nullptr,
5854  read_only_space_ == ro_heap->read_only_space());
5855  DCHECK_NULL(space_[RO_SPACE].get());
5856  read_only_space_ = ro_heap->read_only_space();
5858 }

References DCHECK_IMPLIES, DCHECK_NOT_NULL, DCHECK_NULL, heap_allocator_, v8::internal::ReadOnlyHeap::read_only_space(), read_only_space_, v8::internal::RO_SPACE, v8::internal::HeapAllocator::SetReadOnlySpace(), and space_.

Referenced by v8::internal::Isolate::SetUpFromReadOnlyArtifacts().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetUpSpaces()

void v8::internal::Heap::SetUpSpaces ( LinearAllocationArea new_allocation_info,
LinearAllocationArea old_allocation_info 
)

Definition at line 5967 of file heap.cc.

5968  {
5969  // Ensure SetUpFromReadOnlySpace has been run.
5971 
5972  if (v8_flags.sticky_mark_bits) {
5973  space_[OLD_SPACE] = std::make_unique<StickySpace>(this);
5974  old_space_ = static_cast<OldSpace*>(space_[OLD_SPACE].get());
5975  } else {
5976  space_[OLD_SPACE] = std::make_unique<OldSpace>(this);
5977  old_space_ = static_cast<OldSpace*>(space_[OLD_SPACE].get());
5978  }
5979 
5980  if (!v8_flags.single_generation) {
5981  if (!v8_flags.sticky_mark_bits) {
5982  if (v8_flags.minor_ms) {
5983  space_[NEW_SPACE] = std::make_unique<PagedNewSpace>(
5986  } else {
5987  space_[NEW_SPACE] = std::make_unique<SemiSpaceNewSpace>(
5990  }
5991  new_space_ = static_cast<NewSpace*>(space_[NEW_SPACE].get());
5992  }
5993 
5994  space_[NEW_LO_SPACE] =
5995  std::make_unique<NewLargeObjectSpace>(this, NewSpaceCapacity());
5996  new_lo_space_ =
5997  static_cast<NewLargeObjectSpace*>(space_[NEW_LO_SPACE].get());
5998  }
5999 
6000  space_[CODE_SPACE] = std::make_unique<CodeSpace>(this);
6001  code_space_ = static_cast<CodeSpace*>(space_[CODE_SPACE].get());
6002 
6003  space_[LO_SPACE] = std::make_unique<OldLargeObjectSpace>(this);
6004  lo_space_ = static_cast<OldLargeObjectSpace*>(space_[LO_SPACE].get());
6005 
6006  space_[CODE_LO_SPACE] = std::make_unique<CodeLargeObjectSpace>(this);
6007  code_lo_space_ =
6008  static_cast<CodeLargeObjectSpace*>(space_[CODE_LO_SPACE].get());
6009 
6010  space_[TRUSTED_SPACE] = std::make_unique<TrustedSpace>(this);
6011  trusted_space_ = static_cast<TrustedSpace*>(space_[TRUSTED_SPACE].get());
6012 
6013  space_[TRUSTED_LO_SPACE] = std::make_unique<TrustedLargeObjectSpace>(this);
6015  static_cast<TrustedLargeObjectSpace*>(space_[TRUSTED_LO_SPACE].get());
6016 
6017  if (isolate()->is_shared_space_isolate()) {
6018  DCHECK(!v8_flags.sticky_mark_bits);
6019 
6020  space_[SHARED_SPACE] = std::make_unique<SharedSpace>(this);
6021  shared_space_ = static_cast<SharedSpace*>(space_[SHARED_SPACE].get());
6022 
6023  space_[SHARED_LO_SPACE] = std::make_unique<SharedLargeObjectSpace>(this);
6025  static_cast<SharedLargeObjectSpace*>(space_[SHARED_LO_SPACE].get());
6026 
6027  space_[SHARED_TRUSTED_SPACE] = std::make_unique<SharedTrustedSpace>(this);
6029  static_cast<SharedTrustedSpace*>(space_[SHARED_TRUSTED_SPACE].get());
6030 
6032  std::make_unique<SharedTrustedLargeObjectSpace>(this);
6033  shared_trusted_lo_space_ = static_cast<SharedTrustedLargeObjectSpace*>(
6035  }
6036 
6037  if (isolate()->has_shared_space()) {
6039  shared_allocation_space_ = heap->shared_space_;
6040  shared_lo_allocation_space_ = heap->shared_lo_space_;
6041 
6042  shared_trusted_allocation_space_ = heap->shared_trusted_space_;
6043  shared_trusted_lo_allocation_space_ = heap->shared_trusted_lo_space_;
6044  }
6045 
6046  main_thread_local_heap()->SetUpMainThread(new_allocation_info,
6047  old_allocation_info);
6048 
6049  base::TimeTicks startup_time = base::TimeTicks::Now();
6050 
6051  tracer_.reset(new GCTracer(this, startup_time));
6052  array_buffer_sweeper_.reset(new ArrayBufferSweeper(this));
6053  memory_measurement_.reset(new MemoryMeasurement(isolate()));
6054  if (v8_flags.memory_reducer) memory_reducer_.reset(new MemoryReducer(this));
6056  live_object_stats_.reset(new ObjectStats(this));
6057  dead_object_stats_.reset(new ObjectStats(this));
6058  }
6061  std::make_unique<Heap::AllocationTrackerForDebugging>(this);
6062  }
6063 
6064  LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
6065  LOG(isolate_, IntPtrTEvent("heap-available", Available()));
6066 
6068 
6069  if (new_space() || v8_flags.sticky_mark_bits) {
6070  minor_gc_job_.reset(new MinorGCJob(this));
6071  }
6072 
6073  if (v8_flags.stress_marking > 0) {
6075  }
6076  if (IsStressingScavenge()) {
6077  stress_scavenge_observer_ = new StressScavengeObserver(this);
6080  }
6081 
6082  if (v8_flags.memory_balancer) {
6083  mb_.reset(new MemoryBalancer(this, startup_time));
6084  }
6085 }
SharedTrustedSpace * shared_trusted_allocation_space_
Definition: heap.h:2225
PagedSpace * shared_allocation_space_
Definition: heap.h:2223
friend class MinorGCJob
Definition: heap.h:2509
friend class OldLargeObjectSpace
Definition: heap.h:2504
void SetGetExternallyAllocatedMemoryInBytesCallback(GetExternallyAllocatedMemoryInBytesCallback callback)
Definition: heap.h:1019
std::unique_ptr< AllocationTrackerForDebugging > allocation_tracker_for_debugging_
Definition: heap.h:2346
OldLargeObjectSpace * shared_lo_allocation_space_
Definition: heap.h:2224
std::unique_ptr< GCTracer > tracer_
Definition: heap.h:2329
friend class NewSpace
Definition: heap.h:2514
friend class NewLargeObjectSpace
Definition: heap.h:2513
friend class ArrayBufferSweeper
Definition: heap.h:2486
SharedTrustedLargeObjectSpace * shared_trusted_lo_allocation_space_
Definition: heap.h:2226
size_t Capacity()
Definition: heap.cc:326
friend class GCTracer
Definition: heap.h:2494
friend class MemoryBalancer
Definition: heap.h:2508
bool has_shared_space() const
Definition: isolate.h:2343
void SetUpMainThread(LinearAllocationArea &new_allocation_info, LinearAllocationArea &old_allocation_info)
Definition: local-heap.cc:138
V8_EXPORT_PRIVATE void AddAllocationObserver(AllocationObserver *observer)
static void ReturnNull(const v8::FunctionCallbackInfo< v8::Value > &info)

References v8::internal::MainAllocator::AddAllocationObserver(), allocation_tracker_for_debugging_, allocator(), array_buffer_sweeper_, ArrayBufferSweeper, Available(), Capacity(), v8::internal::CODE_LO_SPACE, code_lo_space_, v8::internal::CODE_SPACE, code_space_, v8::internal::DCHECK(), DCHECK_NOT_NULL, dead_object_stats_, GCTracer, v8::internal::Isolate::has_shared_space(), v8::internal::Isolate::heap(), initial_semispace_size_, v8::internal::TracingFlags::is_gc_stats_enabled(), v8::internal::Isolate::is_shared_space_isolate(), v8::internal::Heap::AllocationTrackerForDebugging::IsNeeded(), isolate(), isolate_, IsStressingScavenge(), live_object_stats_, v8::internal::LO_SPACE, lo_space_, LOG, main_thread_local_heap(), max_semi_space_size_, mb_, memory_measurement_, memory_reducer_, MemoryBalancer, min_semi_space_size_, minor_gc_job_, MinorGCJob, v8::internal::NEW_LO_SPACE, new_lo_space_, v8::internal::NEW_SPACE, new_space(), new_space_, v8::internal::HeapAllocator::new_space_allocator(), NewSpaceCapacity(), NextStressMarkingLimit(), v8::base::TimeTicks::Now(), v8::internal::OLD_SPACE, old_space_, read_only_space_, v8::internal::ReturnNull(), SetGetExternallyAllocatedMemoryInBytesCallback(), v8::internal::LocalHeap::SetUpMainThread(), shared_allocation_space_, shared_lo_allocation_space_, v8::internal::SHARED_LO_SPACE, shared_lo_space_, v8::internal::SHARED_SPACE, shared_space_, v8::internal::Isolate::shared_space_isolate(), shared_trusted_allocation_space_, shared_trusted_lo_allocation_space_, v8::internal::SHARED_TRUSTED_LO_SPACE, shared_trusted_lo_space_, v8::internal::SHARED_TRUSTED_SPACE, shared_trusted_space_, space_, stress_marking_percentage_, stress_scavenge_observer_, tracer_, v8::internal::TRUSTED_LO_SPACE, trusted_lo_space_, v8::internal::TRUSTED_SPACE, trusted_space_, v8::internal::v8_flags, and V8_UNLIKELY.

Referenced by v8::internal::Isolate::Init().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ shared_allocation_space()

PagedSpace* v8::internal::Heap::shared_allocation_space ( ) const
inline

Definition at line 793 of file heap.h.

793  {
795  }

Referenced by v8::internal::EvacuationAllocator::Finalize(), and v8::internal::HeapAllocator::Setup().

+ Here is the caller graph for this function:

◆ shared_lo_allocation_space()

OldLargeObjectSpace* v8::internal::Heap::shared_lo_allocation_space ( ) const
inline

Definition at line 796 of file heap.h.

796  {
798  }

Referenced by v8::internal::HeapAllocator::Setup().

+ Here is the caller graph for this function:

◆ shared_lo_space()

SharedLargeObjectSpace* v8::internal::Heap::shared_lo_space ( ) const
inline

Definition at line 779 of file heap.h.

779 { return shared_lo_space_; }

Referenced by DeactivateMajorGCInProgressFlag(), OldGenerationSizeOfObjects(), v8::internal::IncrementalMarking::OldGenerationSizeOfObjects(), and Unmark().

+ Here is the caller graph for this function:

◆ shared_space()

SharedSpace* v8::internal::Heap::shared_space ( ) const
inline

Definition at line 776 of file heap.h.

776 { return shared_space_; }

Referenced by DeactivateMajorGCInProgressFlag(), EnsureSweepingCompleted(), OldGenerationSizeOfObjects(), v8::internal::MarkCompactCollector::StartCompaction(), Unmark(), and v8::internal::MarkCompactCollector::VerifyMarking().

+ Here is the caller graph for this function:

◆ shared_trusted_allocation_space()

SharedTrustedSpace* v8::internal::Heap::shared_trusted_allocation_space ( ) const
inline

Definition at line 799 of file heap.h.

799  {
801  }

Referenced by v8::internal::HeapAllocator::Setup().

+ Here is the caller graph for this function:

◆ shared_trusted_lo_allocation_space()

SharedTrustedLargeObjectSpace* v8::internal::Heap::shared_trusted_lo_allocation_space ( ) const
inline

Definition at line 802 of file heap.h.

802  {
804  }

Referenced by v8::internal::HeapAllocator::Setup().

+ Here is the caller graph for this function:

◆ shared_trusted_lo_space()

SharedTrustedLargeObjectSpace* v8::internal::Heap::shared_trusted_lo_space ( ) const
inline

Definition at line 789 of file heap.h.

789  {
791  }

◆ shared_trusted_space()

SharedTrustedSpace* v8::internal::Heap::shared_trusted_space ( ) const
inline

Definition at line 783 of file heap.h.

783  {
784  return shared_trusted_space_;
785  }

Referenced by EnsureSweepingCompleted().

+ Here is the caller graph for this function:

◆ SharedHeapContains()

bool v8::internal::Heap::SharedHeapContains ( Tagged< HeapObject value) const

Definition at line 4514 of file heap.cc.

4514  {
4515  if (shared_allocation_space_) {
4516  if (shared_allocation_space_->Contains(value)) return true;
4517  if (shared_lo_allocation_space_->Contains(value)) return true;
4518  if (shared_trusted_allocation_space_->Contains(value)) return true;
4519  if (shared_trusted_lo_allocation_space_->Contains(value)) return true;
4520  }
4521 
4522  return false;
4523 }

References v8::internal::PagedSpaceBase::Contains(), v8::internal::LargeObjectSpace::Contains(), shared_allocation_space_, shared_lo_allocation_space_, shared_trusted_allocation_space_, shared_trusted_lo_allocation_space_, and v8::internal::value.

+ Here is the call graph for this function:

◆ ShouldCurrentGCKeepAgesUnchanged()

bool v8::internal::Heap::ShouldCurrentGCKeepAgesUnchanged ( ) const
inline

Definition at line 1420 of file heap.h.

1420  {
1422  }

Referenced by v8::internal::MarkCompactCollector::StartMarking(), and v8::internal::ConcurrentMarking::TryScheduleJob().

+ Here is the caller graph for this function:

◆ ShouldExpandOldGenerationOnSlowAllocation()

bool v8::internal::Heap::ShouldExpandOldGenerationOnSlowAllocation ( LocalHeap local_heap,
AllocationOrigin  origin 
)
private

Definition at line 5502 of file heap.cc.

5503  {
5504  if (always_allocate() || OldGenerationSpaceAvailable() > 0) return true;
5505  // We reached the old generation allocation limit.
5506 
5507  // Allocations in the GC should always succeed if possible.
5508  if (origin == AllocationOrigin::kGC) return true;
5509 
5510  // Background threads need to be allowed to allocate without GC after teardown
5511  // was initiated.
5512  if (gc_state() == TEAR_DOWN) return true;
5513 
5514  // Allocations need to succeed during isolate deserialization. With shared
5515  // heap allocations, a client isolate may perform shared heap allocations
5516  // during isolate deserialization as well.
5517  if (!deserialization_complete() ||
5518  !local_heap->heap()->deserialization_complete()) {
5519  return true;
5520  }
5521 
5522  // Make it more likely that retry of allocations succeeds.
5523  if (local_heap->IsRetryOfFailedAllocation()) return true;
5524 
5525  // Background thread requested GC, allocation should fail
5526  if (CollectionRequested()) return false;
5527 
5528  if (ShouldOptimizeForMemoryUsage()) return false;
5529 
5530  if (ShouldOptimizeForLoadTime()) return true;
5531 
5532  if (incremental_marking()->IsMajorMarking() &&
5534  return false;
5535  }
5536 
5537  if (incremental_marking()->IsStopped() &&
5539  // We cannot start incremental marking.
5540  return false;
5541  }
5542  return true;
5543 }
IncrementalMarkingLimit IncrementalMarkingLimitReached()
Definition: heap.cc:5640

References AllocationLimitOvershotByLargeMargin(), always_allocate(), CollectionRequested(), deserialization_complete(), gc_state(), v8::internal::LocalHeap::heap(), incremental_marking(), IncrementalMarkingLimitReached(), v8::internal::LocalHeap::IsRetryOfFailedAllocation(), v8::internal::kGC, kNoLimit, OldGenerationSpaceAvailable(), ShouldOptimizeForLoadTime(), ShouldOptimizeForMemoryUsage(), and TEAR_DOWN.

+ Here is the call graph for this function:

◆ ShouldExpandYoungGenerationOnSlowAllocation()

bool v8::internal::Heap::ShouldExpandYoungGenerationOnSlowAllocation ( size_t  allocation_size)
private

Definition at line 5548 of file heap.cc.

5548  {
5550 
5551  if (always_allocate()) return true;
5552 
5553  if (gc_state() == TEAR_DOWN) return true;
5554 
5555  if (!CanPromoteYoungAndExpandOldGeneration(allocation_size)) {
5556  // Assuming all of new space is alive, doing a full GC and promoting all
5557  // objects should still succeed. Don't let new space grow if it means it
5558  // will exceed the available size of old space.
5559  return false;
5560  }
5561 
5562  if (incremental_marking()->IsMajorMarking() &&
5564  // Allocate a new page during full GC incremental marking to avoid
5565  // prematurely finalizing the incremental GC. Once the full GC is over, new
5566  // space will be empty and capacity will be reset.
5567  return true;
5568  }
5569 
5570  return false;
5571 }

References AllocationLimitOvershotByLargeMargin(), always_allocate(), CanPromoteYoungAndExpandOldGeneration(), v8::internal::DCHECK(), deserialization_complete(), gc_state(), incremental_marking(), and TEAR_DOWN.

+ Here is the call graph for this function:

◆ ShouldOptimizeForBattery()

bool v8::internal::Heap::ShouldOptimizeForBattery ( ) const

Definition at line 468 of file heap.cc.

468  {
469  return v8_flags.optimize_gc_for_battery ||
470  isolate()->BatterySaverModeEnabled();
471 }
bool BatterySaverModeEnabled()
Definition: isolate.h:2151

References v8::internal::Isolate::BatterySaverModeEnabled(), isolate(), and v8::internal::v8_flags.

Referenced by v8::internal::ConcurrentMarking::GetMajorMaxConcurrency(), and v8::internal::ConcurrentMarking::GetMinorMaxConcurrency().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ShouldOptimizeForLoadTime()

bool v8::internal::Heap::ShouldOptimizeForLoadTime ( ) const
private

Definition at line 5490 of file heap.cc.

5490  {
5491  double load_start_time = load_start_time_ms_.load(std::memory_order_relaxed);
5492  return load_start_time != kLoadTimeNotLoading &&
5493  !AllocationLimitOvershotByLargeMargin() &&
5494  MonotonicallyIncreasingTimeInMs() < load_start_time + kMaxLoadTimeMs;
5495 }
static const int kMaxLoadTimeMs
Definition: heap.h:1977

References AllocationLimitOvershotByLargeMargin(), kLoadTimeNotLoading, kMaxLoadTimeMs, load_start_time_ms_, and MonotonicallyIncreasingTimeInMs().

Referenced by CollectGarbage(), IncrementalMarkingLimitReached(), and ShouldExpandOldGenerationOnSlowAllocation().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ShouldOptimizeForMemoryUsage()

bool v8::internal::Heap::ShouldOptimizeForMemoryUsage ( )

Definition at line 3898 of file heap.cc.

3898  {
3899  const size_t kOldGenerationSlack = max_old_generation_size() / 8;
3902  !CanExpandOldGeneration(kOldGenerationSlack);
3903 }
bool MemorySaverModeEnabled()
Definition: isolate.h:2158
v8::Isolate::Priority priority()
Definition: isolate.h:2122

References CanExpandOldGeneration(), HighMemoryPressure(), isolate(), v8::Isolate::kBestEffort, max_old_generation_size(), v8::internal::Isolate::MemorySaverModeEnabled(), and v8::internal::Isolate::priority().

Referenced by v8::internal::MarkCompactCollector::ComputeEvacuationHeuristics(), CurrentHeapGrowingMode(), IncrementalMarkingLimitReached(), and ShouldExpandOldGenerationOnSlowAllocation().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ShouldReduceMemory()

◆ ShouldResizeNewSpace()

Heap::ResizeNewSpaceMode v8::internal::Heap::ShouldResizeNewSpace ( )
private

Definition at line 3943 of file heap.cc.

3943  {
3944  if (ShouldReduceMemory()) {
3945  return (v8_flags.predictable) ? ResizeNewSpaceMode::kNone
3947  }
3948 
3949  static const size_t kLowAllocationThroughput = 1000;
3950  const double allocation_throughput =
3951  tracer_->AllocationThroughputInBytesPerMillisecond();
3952  const bool should_shrink = !v8_flags.predictable &&
3953  (allocation_throughput != 0) &&
3954  (allocation_throughput < kLowAllocationThroughput);
3955 
3956  const bool should_grow =
3959 
3960  if (should_grow) survived_since_last_expansion_ = 0;
3961 
3962  if (should_grow == should_shrink) return ResizeNewSpaceMode::kNone;
3964 }

References kGrow, kNone, kShrink, v8::internal::NewSpace::MaximumCapacity(), new_space_, ShouldReduceMemory(), survived_since_last_expansion_, v8::internal::NewSpace::TotalCapacity(), tracer_, and v8::internal::v8_flags.

Referenced by ResizeNewSpace(), and StartResizeNewSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ShouldStressCompaction()

bool v8::internal::Heap::ShouldStressCompaction ( ) const
private

Definition at line 5732 of file heap.cc.

5732  {
5733  return v8_flags.stress_compaction && (gc_count_ & 1) != 0;
5734 }

References gc_count_, and v8::internal::v8_flags.

Referenced by IncrementalMarkingLimitReached(), and SelectGarbageCollector().

+ Here is the caller graph for this function:

◆ ShouldUseBackgroundThreads()

bool v8::internal::Heap::ShouldUseBackgroundThreads ( ) const

Definition at line 454 of file heap.cc.

454  {
455  return !v8_flags.single_threaded_gc_in_background ||
456  !isolate()->EfficiencyModeEnabled();
457 }
bool EfficiencyModeEnabled()
Definition: isolate.h:2134

References v8::internal::Isolate::EfficiencyModeEnabled(), isolate(), and v8::internal::v8_flags.

Referenced by v8::internal::MarkCompactCollector::MaybeEnableBackgroundThreadsInCycle(), v8::internal::ArrayBufferSweeper::RequestSweep(), v8::internal::MinorMarkSweepCollector::StartMarking(), v8::internal::MarkCompactCollector::StartMarking(), and StartMinorMSConcurrentMarkingIfNeeded().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ShouldUseIncrementalMarking()

bool v8::internal::Heap::ShouldUseIncrementalMarking ( ) const

Definition at line 459 of file heap.cc.

459  {
460  if (v8_flags.single_threaded_gc_in_background &&
461  isolate()->EfficiencyModeEnabled()) {
462  return v8_flags.incremental_marking_for_gc_in_background;
463  } else {
464  return true;
465  }
466 }

References isolate(), and v8::internal::v8_flags.

Referenced by v8::internal::IncrementalMarking::CanAndShouldBeStarted().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ShouldUsePrecisePinningForMajorGC()

bool v8::internal::Heap::ShouldUsePrecisePinningForMajorGC ( ) const
inline

Definition at line 400 of file heap.h.

400  {
401  return v8_flags.precise_object_pinning;
402  }

References v8::internal::v8_flags.

◆ ShouldUsePrecisePinningForMinorGC()

bool v8::internal::Heap::ShouldUsePrecisePinningForMinorGC ( ) const
inline

Definition at line 397 of file heap.h.

397  {
398  return v8_flags.scavenger_precise_object_pinning;
399  }

References v8::internal::v8_flags.

Referenced by v8::internal::ScavengerCollector::CollectGarbage().

+ Here is the caller graph for this function:

◆ ShrinkOldGenerationAllocationLimitIfNotConfigured()

void v8::internal::Heap::ShrinkOldGenerationAllocationLimitIfNotConfigured ( )
private

Definition at line 3225 of file heap.cc.

3225  {
3227  tracer()->SurvivalEventsRecorded()) {
3228  base::MutexGuard guard(old_space()->mutex());
3229  const size_t minimum_growing_step =
3232  size_t new_old_generation_allocation_limit =
3233  std::max(OldGenerationConsumedBytes() + minimum_growing_step,
3234  static_cast<size_t>(
3235  static_cast<double>(old_generation_allocation_limit()) *
3236  (tracer()->AverageSurvivalRatio() / 100)));
3237  new_old_generation_allocation_limit = std::min(
3238  new_old_generation_allocation_limit, old_generation_allocation_limit());
3239  size_t new_global_allocation_limit = std::max(
3240  GlobalConsumedBytes() + minimum_growing_step,
3241  static_cast<size_t>(static_cast<double>(global_allocation_limit()) *
3242  (tracer()->AverageSurvivalRatio() / 100)));
3243  new_global_allocation_limit =
3244  std::min(new_global_allocation_limit, global_allocation_limit());
3246  new_old_generation_allocation_limit, new_global_allocation_limit);
3247  }
3248 }
HeapGrowingMode CurrentHeapGrowingMode()
Definition: heap.cc:5578
static size_t MinimumAllocationLimitGrowingStep(Heap::HeapGrowingMode growing_mode)

References CurrentHeapGrowingMode(), global_allocation_limit(), GlobalConsumedBytes(), initial_size_overwritten_, v8::internal::MemoryController< Trait >::MinimumAllocationLimitGrowingStep(), old_generation_allocation_limit(), old_space(), OldGenerationConsumedBytes(), SetOldGenerationAndGlobalAllocationLimit(), tracer(), and using_initial_limit().

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SizeOfObjects()

size_t v8::internal::Heap::SizeOfObjects ( )

Definition at line 1008 of file heap.cc.

1008  {
1009  size_t total = 0;
1010 
1011  for (SpaceIterator it(this); it.HasNext();) {
1012  total += it.Next()->SizeOfObjects();
1013  }
1014  return total;
1015 }

References v8::internal::SpaceIterator::HasNext().

Referenced by CollectGarbageOnMemoryPressure(), v8::internal::Heap::DevToolsTraceEventScope::DevToolsTraceEventScope(), EvaluateOldSpaceLocalPretenuring(), GarbageCollectionEpilogue(), MarkCompact(), PrintShortHeapStatistics(), v8::internal::GCTracer::StartInSafepoint(), and v8::internal::GCTracer::StopInSafepoint().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ space()

Space * v8::internal::Heap::space ( int  idx) const
inline

◆ stack() [1/2]

◆ stack() [2/2]

const ::heap::base::Stack & v8::internal::Heap::stack ( ) const

Definition at line 6260 of file heap.cc.

6260  {
6261  CHECK_NOT_NULL(main_thread_local_heap_);
6262  return main_thread_local_heap_->stack_;
6263 }

References CHECK_NOT_NULL, main_thread_local_heap_, and v8::internal::LocalHeap::stack_.

◆ StartIncrementalMarking()

void v8::internal::Heap::StartIncrementalMarking ( GCFlags  gc_flags,
GarbageCollectionReason  gc_reason,
GCCallbackFlags  gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags,
GarbageCollector  collector = GarbageCollector::MARK_COMPACTOR 
)

Definition at line 1930 of file heap.cc.

1933  {
1934  DCHECK(incremental_marking()->IsStopped());
1935  CHECK_IMPLIES(!v8_flags.allow_allocation_in_fast_api_call,
1936  !isolate()->InFastCCall());
1938 
1939  if (gc_callbacks_depth_ > 0) {
1940  // Do not start incremental marking while invoking GC callbacks.
1941  // Heap::CollectGarbage already decided which GC is going to be
1942  // invoked. In case it chose a young-gen GC, starting an incremental
1943  // full GC during callbacks would break the separate GC phases
1944  // guarantee.
1945  return;
1946  }
1947 
1948  if (IsYoungGenerationCollector(collector)) {
1950  } else {
1951  // Sweeping needs to be completed such that markbits are all cleared before
1952  // starting marking again.
1954  }
1955 
1956  std::optional<SafepointScope> safepoint_scope;
1957 
1958  {
1959  AllowGarbageCollection allow_shared_gc;
1960  safepoint_scope.emplace(isolate(), kGlobalSafepointForSharedSpaceIsolate);
1961  }
1962 
1963 #ifdef DEBUG
1964  VerifyCountersAfterSweeping();
1965 #endif
1966 
1967  std::vector<Isolate*> paused_clients =
1969 
1970  // Now that sweeping is completed, we can start the next full GC cycle.
1971  tracer()->StartCycle(collector, gc_reason, nullptr,
1973 
1974  current_gc_flags_ = gc_flags;
1975  current_gc_callback_flags_ = gc_callback_flags;
1976 
1977  incremental_marking()->Start(collector, gc_reason);
1978 
1979  if (collector == GarbageCollector::MARK_COMPACTOR) {
1980  DCHECK(incremental_marking()->IsMajorMarking());
1984  }
1985 
1986  if (isolate()->is_shared_space_isolate()) {
1987  for (Isolate* client : paused_clients) {
1988  client->heap()->concurrent_marking()->Resume();
1989  }
1990  } else {
1991  DCHECK(paused_clients.empty());
1992  }
1993 }
int gc_callbacks_depth_
Definition: heap.h:2437
void Start(GarbageCollector garbage_collector, GarbageCollectionReason gc_reason)

References CHECK_IMPLIES, CompleteSweepingFull(), CompleteSweepingYoung(), current_gc_callback_flags_, current_gc_flags_, v8::internal::DCHECK(), DCHECK_EQ, gc_callbacks_depth_, incremental_marking(), is_full_gc_during_loading_, isolate(), IsYoungGenerationCollector(), v8::internal::kGlobalSafepointForSharedSpaceIsolate, v8::internal::GCTracer::kIncremental, v8::internal::MARK_COMPACTOR, PauseConcurrentThreadsInClients(), RecomputeLimitsAfterLoadingIfNeeded(), v8::internal::IncrementalMarking::Start(), v8::internal::GCTracer::StartCycle(), tracer(), v8::Isolate::TryGetCurrent(), update_allocation_limits_after_loading_, and v8::internal::v8_flags.

Referenced by CheckMemoryPressure(), CollectGarbageOnMemoryPressure(), HandleExternalMemoryInterrupt(), v8::internal::MemoryReducer::NotifyTimer(), StartIncrementalMarkingIfAllocationLimitIsReached(), and StartMinorMSConcurrentMarkingIfNeeded().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ StartIncrementalMarkingIfAllocationLimitIsReached()

void v8::internal::Heap::StartIncrementalMarkingIfAllocationLimitIsReached ( LocalHeap local_heap,
GCFlags  gc_flags,
GCCallbackFlags  gc_callback_flags = GCCallbackFlags::kNoGCCallbackFlags 
)

Definition at line 2037 of file heap.cc.

2039  {
2040  if (incremental_marking()->IsStopped() &&
2041  incremental_marking()->CanAndShouldBeStarted()) {
2042  switch (IncrementalMarkingLimitReached()) {
2044  if (local_heap->is_main_thread_for(this)) {
2046  gc_flags,
2050  gc_callback_flags);
2051  } else {
2052  ExecutionAccess access(isolate());
2053  isolate()->stack_guard()->RequestStartIncrementalMarking();
2054  if (auto* job = incremental_marking()->incremental_marking_job()) {
2055  job->ScheduleTask();
2056  }
2057  }
2058  break;
2060  if (auto* job = incremental_marking()->incremental_marking_job()) {
2061  job->ScheduleTask(TaskPriority::kUserVisible);
2062  }
2063  break;
2065  // This is a fallback case where no appropriate limits have been
2066  // configured yet.
2067  if (local_heap->is_main_thread_for(this) &&
2068  memory_reducer() != nullptr) {
2070  }
2071  break;
2073  break;
2074  }
2075  }
2076 }

References incremental_marking(), IncrementalMarkingLimitReached(), v8::internal::LocalHeap::is_main_thread_for(), isolate(), v8::internal::kAllocationLimit, kFallbackForEmbedderLimit, v8::internal::kGlobalAllocationLimit, kHardLimit, kNoLimit, kSoftLimit, v8::kUserVisible, memory_reducer(), NewSpaceTargetCapacity(), v8::internal::MemoryReducer::NotifyPossibleGarbage(), OldGenerationSpaceAvailable(), v8::internal::Isolate::stack_guard(), and StartIncrementalMarking().

Referenced by v8::internal::OldLargeObjectSpace::AllocateRaw(), CollectGarbage(), v8::internal::PagedSpaceAllocatorPolicy::EnsureAllocation(), HandleExternalMemoryInterrupt(), and StartIncrementalMarkingOnInterrupt().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ StartIncrementalMarkingOnInterrupt()

void v8::internal::Heap::StartIncrementalMarkingOnInterrupt ( )

Definition at line 2031 of file heap.cc.

References GCFlagsForIncrementalMarking(), v8::kGCCallbackScheduleIdleGarbageCollection, main_thread_local_heap(), and StartIncrementalMarkingIfAllocationLimitIsReached().

Referenced by v8::internal::StackGuard::HandleInterrupts().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ StartMinorMSConcurrentMarkingIfNeeded()

void v8::internal::Heap::StartMinorMSConcurrentMarkingIfNeeded ( )
private

Definition at line 1237 of file heap.cc.

1237  {
1238  if (incremental_marking()->IsMarking()) return;
1239  if (v8_flags.concurrent_minor_ms_marking && !IsTearingDown() &&
1240  incremental_marking()->CanAndShouldBeStarted() &&
1241  V8_LIKELY(!v8_flags.gc_global)) {
1242  size_t usable_capacity = 0;
1243  size_t new_space_size = 0;
1244  if (v8_flags.sticky_mark_bits) {
1245  // TODO(333906585): Adjust parameters.
1246  usable_capacity =
1248  new_space_size = sticky_space()->young_objects_size();
1249  } else {
1250  usable_capacity = paged_new_space()->paged_space()->UsableCapacity();
1251  new_space_size = new_space()->Size();
1252  }
1253  if ((usable_capacity >=
1254  v8_flags.minor_ms_min_new_space_capacity_for_concurrent_marking_mb *
1255  MB) &&
1256  (new_space_size >= MinorMSConcurrentMarkingTrigger(this)) &&
1261  // Schedule a task for finalizing the GC if needed.
1263  }
1264  }
1265 }
bool IsTearingDown() const
Definition: heap.h:568
bool ShouldUseBackgroundThreads() const
Definition: heap.cc:454
size_t MinorMSConcurrentMarkingTrigger(Heap *heap)
Definition: heap.cc:1224

References v8::internal::PagedSpaceBase::Capacity(), incremental_marking(), IsTearingDown(), v8::internal::kNoFlags, v8::kNoGCCallbackFlags, v8::internal::kTask, v8::internal::MB, minor_gc_job(), v8::internal::MINOR_MARK_SWEEPER, v8::internal::anonymous_namespace{heap.cc}::MinorMSConcurrentMarkingTrigger(), new_space(), v8::internal::StickySpace::old_objects_size(), paged_new_space(), v8::internal::PagedNewSpace::paged_space(), ShouldUseBackgroundThreads(), StartIncrementalMarking(), sticky_space(), v8::internal::MinorGCJob::TryScheduleTask(), v8::internal::PagedSpaceForNewSpace::UsableCapacity(), v8::internal::v8_flags, V8_LIKELY, and v8::internal::StickySpace::young_objects_size().

Referenced by CollectGarbage(), and v8::internal::PagedSpaceAllocatorPolicy::EnsureAllocation().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ StartResizeNewSpace()

void v8::internal::Heap::StartResizeNewSpace ( )
private

Definition at line 3977 of file heap.cc.

3977  {
3979  DCHECK(v8_flags.minor_ms);
3982  size_t reduced_capacity = ComputeReducedNewSpaceSize(new_space());
3983  paged_new_space()->StartShrinking(reduced_capacity);
3984  }
3985 }
bool StartShrinking(size_t new_target_capacity)
Definition: new-spaces.h:616

References v8::internal::anonymous_namespace{heap.cc}::ComputeReducedNewSpaceSize(), v8::internal::DCHECK(), DCHECK_EQ, kNone, kShrink, new_space(), paged_new_space(), resize_new_space_mode_, ShouldResizeNewSpace(), v8::internal::PagedNewSpace::StartShrinking(), and v8::internal::v8_flags.

Referenced by v8::internal::MinorMarkSweepCollector::StartSweepNewSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ StartTearDown()

void v8::internal::Heap::StartTearDown ( )

Definition at line 6265 of file heap.cc.

6265  {
6266  if (cpp_heap_) {
6267  // This may invoke a GC in case marking is running to get us into a
6268  // well-defined state for tear down.
6270  }
6271 
6272  // Stressing incremental marking should make it likely to force a GC here with
6273  // a CppHeap present. Stress compaction serves as a more deterministic way to
6274  // trigger such a GC.
6275  if (v8_flags.stress_compaction) {
6277  }
6278 
 6279  // Finish any ongoing sweeping to avoid stray background tasks still accessing the heap during teardown.
6281 
6282  if (v8_flags.concurrent_marking) {
6284  }
6285 
6287 
6288  // Background threads may allocate and block until GC is performed. However
6289  // this might never happen when the main thread tries to quit and doesn't
6290  // process the event queue anymore. Avoid this deadlock by allowing all
6291  // allocations after tear down was requested to make sure all background
6292  // threads finish.
6293  collection_barrier_->NotifyShutdownRequested();
6294 
6295  // Main thread isn't going to allocate anymore.
6297 
6299 }
void StartDetachingIsolate()
Definition: cpp-heap.cc:638
ConcurrentMarking * concurrent_marking() const
Definition: heap.h:1125

References CollectGarbage(), collection_barrier_, CompleteSweepingFull(), concurrent_marking(), cpp_heap_, v8::internal::LocalHeap::FreeLinearAllocationAreas(), FreeMainThreadLinearAllocationAreas(), v8::internal::CppHeap::From(), v8::internal::kTesting, main_thread_local_heap(), v8::internal::OLD_SPACE, v8::internal::ConcurrentMarking::Pause(), SetGCState(), v8::internal::CppHeap::StartDetachingIsolate(), TEAR_DOWN, and v8::internal::v8_flags.

Referenced by v8::internal::Isolate::Deinit().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ StaticRootsEnsureAllocatedSize()

void v8::internal::Heap::StaticRootsEnsureAllocatedSize ( DirectHandle< HeapObject obj,
int  required 
)
private

Definition at line 898 of file setup-heap-internal.cc.

899  {
901  int obj_size = obj->Size();
902  if (required == obj_size) return;
903  CHECK_LT(obj_size, required);
904  int filler_size = required - obj_size;
905 
906  Tagged<HeapObject> filler =
910  CreateFillerObjectAt(filler.address(), filler_size,
912 
913  CHECK_EQ(filler.address(), obj->address() + obj_size);
914  CHECK_EQ(filler.address() + filler->Size(), obj->address() + required);
915  }
916 }

References v8::internal::DirectHandle< T >::address(), v8::internal::Tagged< HeapObject >::address(), v8::internal::HeapAllocator::AllocateRawWith(), allocator(), CHECK_EQ, CHECK_LT, CreateFillerObjectAt(), v8::internal::kClearFreedMemory, v8::internal::kReadOnly, v8::internal::HeapAllocator::kRetryOrFail, v8::internal::kRuntime, v8::internal::kTaggedAligned, V8_STATIC_ROOTS_BOOL, and V8_STATIC_ROOTS_GENERATION_BOOL.

Referenced by CreateReadOnlyApiObjects(), and CreateReadOnlyObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ sticky_space()

StickySpace * v8::internal::Heap::sticky_space ( ) const
inline

◆ SurvivedYoungObjectSize()

size_t v8::internal::Heap::SurvivedYoungObjectSize ( )
inline

Definition at line 1371 of file heap.h.

1371  {
1373  }

Referenced by v8::internal::GCTracer::StopInSafepoint().

+ Here is the caller graph for this function:

◆ sweeper()

◆ sweeping_in_progress()

bool v8::internal::Heap::sweeping_in_progress ( ) const
inline

Definition at line 1591 of file heap.h.

1591 { return sweeper_->sweeping_in_progress(); }

Referenced by CompleteSweepingFull(), EnsureSweepingCompleted(), EnsureSweepingCompletedForObject(), PrintShortHeapStatistics(), v8::internal::PagedSpaceAllocatorPolicy::RefillLab(), and v8::internal::IncrementalMarking::Start().

+ Here is the caller graph for this function:

◆ sweeping_in_progress_for_space()

bool v8::internal::Heap::sweeping_in_progress_for_space ( AllocationSpace  space) const
inline

Definition at line 1592 of file heap.h.

1592  {
1593  return sweeper_->sweeping_in_progress_for_space(space);
1594  }

References space.

◆ TearDown()

void v8::internal::Heap::TearDown ( )

Definition at line 6320 of file heap.cc.

6320  {
6322 
6323  // Assert that there are no background threads left and no executable memory
6324  // chunks are unprotected.
6326 
6327  DCHECK(concurrent_marking()->IsStopped());
6328 
6329  // It's too late for Heap::Verify() here, as parts of the Isolate are
6330  // already gone by the time this is called.
6331 
6333 
6334  if (v8_flags.fuzzer_gc_analysis) {
6335  if (v8_flags.stress_marking > 0) {
6337  }
6338  if (IsStressingScavenge()) {
6340  }
6341  }
6342 
6343  if (cpp_heap_) {
6345  cpp_heap_ = nullptr;
6347  }
6348 
6349  minor_gc_job_.reset();
6350 
6355  }
6357 
6358  if (IsStressingScavenge()) {
6362  stress_scavenge_observer_ = nullptr;
6363  }
6364 
6366  mark_compact_collector_->TearDown();
6367  mark_compact_collector_.reset();
6368  }
6369 
6371  minor_mark_sweep_collector_->TearDown();
6373  }
6374 
6375  sweeper_->TearDown();
6376  sweeper_.reset();
6377 
6378  scavenger_collector_.reset();
6379  array_buffer_sweeper_.reset();
6380  incremental_marking_.reset();
6381  concurrent_marking_.reset();
6382 
6383  memory_measurement_.reset();
6385  ephemeron_remembered_set_.reset();
6386 
6387  if (memory_reducer_ != nullptr) {
6388  memory_reducer_->TearDown();
6389  memory_reducer_.reset();
6390  }
6391 
6392  live_object_stats_.reset();
6393  dead_object_stats_.reset();
6394 
6395  embedder_roots_handler_ = nullptr;
6396 
6397  tracer_.reset();
6398 
6400 
6401  for (int i = FIRST_MUTABLE_SPACE; i <= LAST_MUTABLE_SPACE; i++) {
6402  space_[i].reset();
6403  }
6404 
6405  read_only_space_ = nullptr;
6406 
6409 
6410  StrongRootsEntry* next = nullptr;
6411  for (StrongRootsEntry* current = strong_roots_head_; current;
6412  current = next) {
6413  next = current->next;
6414  delete current;
6415  }
6416  strong_roots_head_ = nullptr;
6417 
6418  memory_allocator_.reset();
6419 
6420  heap_profiler_.reset();
6421 }
void PrintMaxMarkingLimitReached()
Definition: heap.cc:6118
void PrintMaxNewSpaceSizeReached()
Definition: heap.cc:6123
void RemoveAllocationObserversFromAllSpaces(AllocationObserver *observer, AllocationObserver *new_space_observer)
Definition: heap.cc:1034
void RunReleaseCppHeapCallback(std::unique_ptr< v8::CppHeap > cpp_heap)
Definition: isolate.cc:6975
V8_EXPORT_PRIVATE void RemoveAllocationObserver(AllocationObserver *observer)
V8_EXPORT_PRIVATE void TearDown()
void ReleaseOnTearDown(Isolate *isolate)
Definition: page-pool.cc:296
@ FIRST_MUTABLE_SPACE
Definition: globals.h:1318
@ LAST_MUTABLE_SPACE
Definition: globals.h:1319

References allocation_tracker_for_debugging_, allocator(), array_buffer_sweeper_, v8::internal::IsolateSafepoint::AssertMainThreadIsOnlyThread(), concurrent_marking(), concurrent_marking_, cpp_heap_, v8::internal::DCHECK(), DCHECK_EQ, dead_object_stats_, v8::internal::CppHeap::DetachIsolate(), embedder_roots_handler_, ephemeron_remembered_set_, v8::internal::FIRST_MUTABLE_SPACE, v8::internal::CppHeap::From(), gc_state(), heap_profiler_, v8::internal::anonymous_namespace{json-stringifier.cc}::i, incremental_marking_, isolate(), isolate_, IsStressingScavenge(), v8::internal::LAST_MUTABLE_SPACE, live_object_stats_, mark_compact_collector_, memory_allocator(), memory_allocator_, memory_measurement_, memory_reducer_, minor_gc_job_, minor_mark_sweep_collector_, need_to_remove_stress_concurrent_allocation_observer_, v8::internal::HeapAllocator::new_space_allocator(), v8::internal::StrongRootsEntry::next, owning_cpp_heap_, v8::internal::MemoryAllocator::pool(), pretenuring_handler_, PrintMaxMarkingLimitReached(), PrintMaxNewSpaceSizeReached(), read_only_space_, v8::internal::PagePool::ReleaseOnTearDown(), v8::internal::MainAllocator::RemoveAllocationObserver(), RemoveAllocationObserversFromAllSpaces(), v8::internal::PretenuringHandler::reset(), v8::internal::Isolate::RunReleaseCppHeapCallback(), safepoint(), scavenger_collector_, space_, stress_concurrent_allocation_observer_, stress_scavenge_observer_, strong_roots_head_, sweeper_, TEAR_DOWN, v8::internal::MemoryAllocator::TearDown(), tracer_, UpdateMaximumCommitted(), and v8::internal::v8_flags.

Referenced by v8::internal::Isolate::Deinit().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ TearDownWithSharedHeap()

void v8::internal::Heap::TearDownWithSharedHeap ( )

Definition at line 6301 of file heap.cc.

6301  {
6303 
6304  // Assert that there are no background threads left and no executable memory
6305  // chunks are unprotected.
6307 
6308  // Now that all threads are stopped, verify the heap before tearing down the
6309  // heap/isolate.
6311 
6312  // Might use the external pointer which might be in the shared heap.
6314 
6315  // Publish shared object worklist for the main thread if incremental marking
6316  // is enabled for the shared heap.
6318 }
MarkingBarrier * marking_barrier()
Definition: local-heap.h:130

References v8::internal::IsolateSafepoint::AssertMainThreadIsOnlyThread(), DCHECK_EQ, external_string_table_, gc_state(), main_thread_local_heap(), v8::internal::LocalHeap::marking_barrier(), v8::internal::MarkingBarrier::PublishSharedIfNeeded(), safepoint(), TEAR_DOWN, v8::internal::Heap::ExternalStringTable::TearDown(), and v8::internal::HeapVerifier::VerifyHeapIfEnabled().

Referenced by v8::internal::Isolate::Deinit().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ToBoolean()

Tagged< Boolean > v8::internal::Heap::ToBoolean ( bool  condition)
inline

Definition at line 381 of file heap-inl.h.

381  {
382  ReadOnlyRoots roots(this);
383  return roots.boolean_value(condition);
384 }

References v8::internal::ReadOnlyRoots::boolean_value().

Referenced by v8::internal::anonymous_namespace{runtime-object.cc}::DeleteProperty(), v8::internal::RUNTIME_FUNCTION(), v8::internal::Object::ToBoolean(), and v8::internal::PropertyDescriptor::ToObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ TotalGlobalHandlesSize()

size_t v8::internal::Heap::TotalGlobalHandlesSize ( )

Definition at line 1017 of file heap.cc.

1017  {
1018  return isolate_->global_handles()->TotalSize() +
1019  isolate_->traced_handles()->total_size_bytes();
1020 }

References v8::internal::Isolate::global_handles(), isolate_, v8::internal::TracedHandles::total_size_bytes(), v8::internal::GlobalHandles::TotalSize(), and v8::internal::Isolate::traced_handles().

+ Here is the call graph for this function:

◆ tracer() [1/2]

GCTracer* v8::internal::Heap::tracer ( )
inline

Definition at line 843 of file heap.h.

843 { return tracer_.get(); }

Referenced by v8::internal::CppHeap::MetricRecorderAdapter::AddMainThreadEvent(), v8::internal::IncrementalMarkingJob::AverageTimeToTask(), CallGCEpilogueCallbacks(), CallGCPrologueCallbacks(), v8::internal::MinorMarkSweepCollector::ClearNonLiveReferences(), v8::internal::MinorMarkSweepCollector::CollectGarbage(), v8::internal::ScavengerCollector::CollectGarbage(), CollectGarbage(), v8::internal::anonymous_namespace{heap.cc}::CompleteArrayBufferSweeping(), CompleteSweepingFull(), v8::internal::ScavengerCollector::CompleteSweepingQuarantinedPagesIfNeeded(), v8::internal::MarkCompactCollector::ComputeEvacuationHeuristics(), v8::internal::Sweeper::EnsureMajorCompleted(), v8::internal::Sweeper::EnsurePageIsSwept(), EnsureSweepingCompleted(), EnsureYoungSweepingCompleted(), v8::internal::GlobalSafepoint::EnterGlobalSafepointScope(), v8::internal::IsolateSafepoint::EnterLocalSafepointScope(), v8::internal::MarkCompactCollector::Finish(), v8::internal::MinorMarkSweepCollector::Finish(), v8::internal::MinorMarkSweepCollector::FinishConcurrentMarking(), v8::internal::Sweeper::FinishMajorJobs(), v8::internal::CppHeap::FinishMarkingAndProcessWeakness(), v8::internal::Sweeper::FinishMinorJobs(), GarbageCollectionEpilogue(), GarbageCollectionEpilogueInSafepoint(), GarbageCollectionPrologue(), GarbageCollectionPrologueInSafepoint(), v8::internal::ArrayBufferSweeper::GetTraceIdForFlowEvent(), HasLowEmbedderAllocationRate(), HasLowOldGenerationAllocationRate(), HasLowYoungGenerationAllocationRate(), IncrementalMarkingLimitReached(), v8::internal::CppHeap::InitializeMarking(), InvokeNearHeapLimitCallback(), v8::internal::GlobalHandles::InvokeSecondPassPhantomCallbacks(), MarkCompactEpilogue(), MarkCompactPrologue(), v8::internal::MinorMarkSweepCollector::MarkLiveObjects(), v8::internal::MinorMarkSweepCollector::MarkRoots(), v8::internal::MarkCompactCollector::MarkRootsFromConservativeStack(), v8::internal::MinorMarkSweepCollector::MarkRootsFromConservativeStack(), 
v8::internal::MinorMarkSweepCollector::MarkRootsFromTracedHandles(), MinorMarkSweep(), NotifyContextDisposed(), PerformGarbageCollection(), v8::internal::MinorMarkSweepCollector::PerformWrapperTracing(), v8::internal::MarkCompactCollector::Prepare(), v8::internal::PretenuringHandler::ProcessPretenuringFeedback(), RecomputeLimits(), v8::internal::ArrayBufferSweeper::RequestSweep(), v8::internal::ArrayBufferSweeper::SweepingState::SweepingJob::Run(), v8::internal::ConcurrentMarking::JobTaskMajor::Run(), v8::internal::ConcurrentMarking::JobTaskMinor::Run(), v8::internal::ScavengerCollector::QuarantinedPageSweeper::JobTask::Run(), v8::internal::ConcurrentMarking::RunMajor(), v8::internal::ConcurrentMarking::RunMinorImpl(), Scavenge(), v8::internal::Isolate::SetPriority(), ShrinkOldGenerationAllocationLimitIfNotConfigured(), v8::internal::LocalHeap::SleepInSafepoint(), v8::internal::LocalHeap::SleepInUnpark(), v8::internal::IncrementalMarking::Start(), StartIncrementalMarking(), v8::internal::Sweeper::StartMajorSweeperTasks(), v8::internal::Sweeper::StartMajorSweeping(), v8::internal::MinorMarkSweepCollector::StartMarking(), v8::internal::MarkCompactCollector::StartMarking(), v8::internal::Sweeper::StartMinorSweeperTasks(), v8::internal::Sweeper::StartMinorSweeping(), v8::internal::MinorMarkSweepCollector::StartSweepNewSpace(), v8::internal::MinorMarkSweepCollector::StartSweepNewSpaceWithStickyBits(), v8::internal::IncrementalMarking::Step(), v8::internal::MinorMarkSweepCollector::Sweep(), v8::internal::Sweeper::SweepEmptyNewSpacePage(), v8::internal::MinorMarkSweepCollector::SweepNewLargeSpace(), v8::internal::anonymous_namespace{mark-compact.cc}::TraceEvacuation(), UpdateSurvivalStatistics(), and v8::internal::MarkCompactCollector::VerifyMarking().

+ Here is the caller graph for this function:

◆ tracer() [2/2]

const GCTracer* v8::internal::Heap::tracer ( ) const
inline

Definition at line 844 of file heap.h.

844 { return tracer_.get(); }

◆ trusted_lo_space()

TrustedLargeObjectSpace* v8::internal::Heap::trusted_lo_space ( ) const
inline

Definition at line 786 of file heap.h.

786  {
787  return trusted_lo_space_;
788  }

Referenced by DeactivateMajorGCInProgressFlag(), v8::internal::OldGenerationMemoryChunkIterator::next(), OldGenerationSizeOfObjects(), and Unmark().

+ Here is the caller graph for this function:

◆ trusted_space()

TrustedSpace* v8::internal::Heap::trusted_space ( ) const
inline

◆ TryFindCodeForInnerPointerForPrinting()

std::optional< Tagged< Code > > v8::internal::Heap::TryFindCodeForInnerPointerForPrinting ( Address  inner_pointer)

Definition at line 7364 of file heap.cc.

7365  {
7366  if (InSpaceSlow(inner_pointer, i::CODE_SPACE) ||
7367  InSpaceSlow(inner_pointer, i::CODE_LO_SPACE) ||
7369  std::optional<Tagged<GcSafeCode>> maybe_code =
7370  GcSafeTryFindCodeForInnerPointer(inner_pointer);
7371  if (maybe_code.has_value()) {
7372  return maybe_code.value()->UnsafeCastToCode();
7373  }
7374  }
7375  return {};
7376 }
V8_EXPORT_PRIVATE bool InSpaceSlow(Address addr, AllocationSpace space) const
Definition: heap.cc:4573
static bool PcIsOffHeap(Isolate *isolate, Address pc)

References v8::internal::CODE_LO_SPACE, v8::internal::CODE_SPACE, GcSafeTryFindCodeForInnerPointer(), InSpaceSlow(), isolate(), and v8::internal::OffHeapInstructionStream::PcIsOffHeap().

Referenced by _v8_internal_Print_Code(), and _v8_internal_Print_OnlyCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Unmark()

void v8::internal::Heap::Unmark ( )

Definition at line 3726 of file heap.cc.

3726  {
3727  DCHECK(v8_flags.sticky_mark_bits);
3729 
3730  auto unmark_space = [](auto& space) {
3731  for (auto* page : space) {
3732  page->marking_bitmap()->template Clear<AccessMode::NON_ATOMIC>();
3733  page->Chunk()->SetMajorGCInProgress();
3734  page->SetLiveBytes(0);
3735  }
3736  };
3737 
3738  unmark_space(*old_space());
3739  unmark_space(*lo_space());
3740 
3741  if (isolate()->is_shared_space_isolate()) {
3742  unmark_space(*shared_space());
3743  unmark_space(*shared_lo_space());
3744  }
3745 
3746  {
3747  RwxMemoryWriteScope scope("For writing flags.");
3748  unmark_space(*code_space());
3749  unmark_space(*code_lo_space());
3750  }
3751 
3752  unmark_space(*trusted_space());
3753  unmark_space(*trusted_lo_space());
3754 }

References code_lo_space(), code_space(), v8::internal::DCHECK(), DCHECK_NULL, isolate(), lo_space(), new_space(), old_space(), shared_lo_space(), shared_space(), space(), trusted_lo_space(), trusted_space(), and v8::internal::v8_flags.

Referenced by v8::internal::MarkCompactCollector::StartMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UnmarkSharedLinearAllocationAreas()

void v8::internal::Heap::UnmarkSharedLinearAllocationAreas ( )
private

Definition at line 3707 of file heap.cc.

3707  {
3708  DCHECK(!v8_flags.black_allocated_pages);
3711  safepoint()->IterateLocalHeaps([](LocalHeap* local_heap) {
3712  local_heap->UnmarkSharedLinearAllocationsArea();
3713  });
3714 }
void UnmarkSharedLinearAllocationsArea()
Definition: local-heap.cc:439

References allocator(), v8::internal::DCHECK(), v8::internal::IsolateSafepoint::IterateLocalHeaps(), main_thread_local_heap(), safepoint(), v8::internal::HeapAllocator::UnmarkSharedLinearAllocationAreas(), v8::internal::LocalHeap::UnmarkSharedLinearAllocationsArea(), and v8::internal::v8_flags.

+ Here is the call graph for this function:

◆ UnregisterStrongRoots()

void v8::internal::Heap::UnregisterStrongRoots ( StrongRootsEntry entry)

Definition at line 7006 of file heap.cc.

7006  {
7007  // We're either on the main thread or in a background thread with an active
7008  // local heap.
7009  DCHECK(isolate()->CurrentLocalHeap()->IsRunning());
7010 
7012 
7013  StrongRootsEntry* prev = entry->prev;
7014  StrongRootsEntry* next = entry->next;
7015 
7016  if (prev) prev->next = next;
7017  if (next) next->prev = prev;
7018 
7019  if (strong_roots_head_ == entry) {
7020  DCHECK_NULL(prev);
7021  strong_roots_head_ = next;
7022  }
7023 
7024  delete entry;
7025 }

References v8::internal::DCHECK(), DCHECK_NULL, isolate(), v8::internal::StrongRootsEntry::next, v8::internal::StrongRootsEntry::prev, strong_roots_head_, and strong_roots_mutex_.

Referenced by v8::internal::IdentityMapBase::Clear().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateExternalMemory()

uint64_t v8::internal::Heap::UpdateExternalMemory ( int64_t  delta)

Definition at line 6963 of file heap.cc.

6963  {
6964  uint64_t amount = external_memory_.UpdateAmount(delta);
6965  uint64_t low_since_mark_compact = external_memory_.low_since_mark_compact();
6966  if (amount < low_since_mark_compact) {
6968  }
6969  return amount;
6970 }
uint64_t UpdateAmount(int64_t delta)
Definition: heap.h:251

References external_memory_, v8::internal::Heap::ExternalMemoryAccounting::low_since_mark_compact(), v8::internal::Heap::ExternalMemoryAccounting::UpdateAmount(), and v8::internal::Heap::ExternalMemoryAccounting::UpdateLowSinceMarkCompact().

Referenced by v8::Isolate::AdjustAmountOfExternalAllocatedMemoryImpl(), and v8::ExternalMemoryAccounter::Decrease().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateExternalString()

void v8::internal::Heap::UpdateExternalString ( Tagged< String string,
size_t  old_payload,
size_t  new_payload 
)

Definition at line 2823 of file heap.cc.

2824  {
2825  DCHECK(IsExternalString(string));
2826 
2828 
2829  if (old_payload > new_payload) {
2830  page->DecrementExternalBackingStoreBytes(
2831  ExternalBackingStoreType::kExternalString, old_payload - new_payload);
2832  } else {
2833  page->IncrementExternalBackingStoreBytes(
2834  ExternalBackingStoreType::kExternalString, new_payload - old_payload);
2835  }
2836 }

References v8::internal::DCHECK(), v8::internal::MutablePageMetadata::DecrementExternalBackingStoreBytes(), v8::internal::PageMetadata::FromHeapObject(), v8::internal::MutablePageMetadata::IncrementExternalBackingStoreBytes(), v8::internal::InstanceTypeChecker::IsExternalString(), and v8::internal::kExternalString.

Referenced by v8::internal::anonymous_namespace{string.cc}::MigrateExternalStringResource(), v8::internal::anonymous_namespace{deserializer.cc}::PostProcessExternalString(), v8::internal::ExternalOneByteString::SetResource(), and v8::internal::ExternalTwoByteString::SetResource().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateMaximumCommitted()

void v8::internal::Heap::UpdateMaximumCommitted ( )

Definition at line 391 of file heap.cc.

391  {
392  if (!HasBeenSetUp()) return;
393 
394  const size_t current_committed_memory = CommittedMemory();
395  if (current_committed_memory > maximum_committed_) {
396  maximum_committed_ = current_committed_memory;
397  }
398 }

References CommittedMemory(), HasBeenSetUp(), and maximum_committed_.

Referenced by GarbageCollectionEpilogue(), GarbageCollectionPrologue(), and TearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateOldGenerationAllocationCounter()

void v8::internal::Heap::UpdateOldGenerationAllocationCounter ( )
inline

Definition at line 1393 of file heap.h.

1393  {
1396  }
size_t OldGenerationAllocationCounter()
Definition: heap.h:1398

Referenced by MarkCompact(), and RecomputeLimitsAfterLoadingIfNeeded().

+ Here is the caller graph for this function:

◆ UpdateReferencesInExternalStringTable()

void v8::internal::Heap::UpdateReferencesInExternalStringTable ( ExternalStringTableUpdaterCallback  updater_func)
private

Definition at line 3008 of file heap.cc.

3009  {
3011 }
void UpdateReferences(Heap::ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:2996

References external_string_table_, and v8::internal::Heap::ExternalStringTable::UpdateReferences().

+ Here is the call graph for this function:

◆ UpdateStrongRoots()

void v8::internal::Heap::UpdateStrongRoots ( StrongRootsEntry entry,
FullObjectSlot  start,
FullObjectSlot  end 
)

Definition at line 7000 of file heap.cc.

7001  {
7002  entry->start = start;
7003  entry->end = end;
7004 }

References v8::internal::compiler::end(), v8::internal::StrongRootsEntry::end, and v8::internal::StrongRootsEntry::start.

Referenced by v8::internal::IdentityMapBase::Resize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateSurvivalStatistics()

void v8::internal::Heap::UpdateSurvivalStatistics ( int  start_new_space_size)

Definition at line 2247 of file heap.cc.

2247  {
2248  if (start_new_space_size == 0) return;
2249 
2250  promotion_ratio_ = (static_cast<double>(promoted_objects_size_) /
2251  static_cast<double>(start_new_space_size) * 100);
2252 
2254  promotion_rate_ =
2255  (static_cast<double>(promoted_objects_size_) /
2256  static_cast<double>(previous_new_space_surviving_object_size_) * 100);
2257  } else {
2258  promotion_rate_ = 0;
2259  }
2260 
2262  (static_cast<double>(new_space_surviving_object_size_) /
2263  static_cast<double>(start_new_space_size) * 100);
2264 
2265  double survival_rate = promotion_ratio_ + new_space_surviving_rate_;
2266  tracer()->AddSurvivalRatio(survival_rate);
2267 }
void AddSurvivalRatio(double survival_ratio)
Definition: gc-tracer.cc:737
double new_space_surviving_rate_
Definition: heap.h:2318
double promotion_ratio_
Definition: heap.h:2314
double promotion_rate_
Definition: heap.h:2315

References v8::internal::GCTracer::AddSurvivalRatio(), new_space_surviving_object_size_, new_space_surviving_rate_, previous_new_space_surviving_object_size_, promoted_objects_size_, promotion_rate_, promotion_ratio_, and tracer().

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateTotalGCTime()

void v8::internal::Heap::UpdateTotalGCTime ( base::TimeDelta  duration)
private

Definition at line 6883 of file heap.cc.

6883  {
6885 }

References v8::internal::duration, and total_gc_time_ms_.

Referenced by v8::internal::GCTracer::StopObservablePause().

+ Here is the caller graph for this function:

◆ UpdateYoungReferenceInExternalStringTableEntry()

Tagged< String > v8::internal::Heap::UpdateYoungReferenceInExternalStringTableEntry ( Heap heap,
FullObjectSlot  pointer 
)
static

Definition at line 2838 of file heap.cc.

2839  {
2840  // This is only used for Scavenger.
2841  DCHECK(!v8_flags.minor_ms);
2842 
2843  PtrComprCageBase cage_base(heap->isolate());
2844  Tagged<HeapObject> obj = Cast<HeapObject>(*p);
2845  MapWord first_word = obj->map_word(cage_base, kRelaxedLoad);
2846 
2847  Tagged<String> new_string;
2848 
2849  if (InFromPage(obj)) {
2850  if (!first_word.IsForwardingAddress()) {
2851  // Unreachable external string can be finalized.
2852  Tagged<String> string = Cast<String>(obj);
2853  if (!IsExternalString(string, cage_base)) {
2854  // Original external string has been internalized.
2855  DCHECK(IsThinString(string, cage_base));
2856  return Tagged<String>();
2857  }
2858  heap->FinalizeExternalString(string);
2859  return Tagged<String>();
2860  }
2861  new_string = Cast<String>(first_word.ToForwardingAddress(obj));
2862  } else {
2863  new_string = Cast<String>(obj);
2864  }
2865 
2866  // String is still reachable.
2867  if (IsThinString(new_string, cage_base)) {
2868  // Filtering Thin strings out of the external string table.
2869  return Tagged<String>();
2870  } else if (IsExternalString(new_string, cage_base)) {
2873  PageMetadata::FromAddress((*p).ptr()),
2874  PageMetadata::FromHeapObject(new_string),
2875  Cast<ExternalString>(new_string)->ExternalPayloadSize());
2876  return new_string;
2877  }
2878 
2879  // Internalization can replace external strings with non-external strings.
2880  return IsExternalString(new_string, cage_base) ? new_string
2881  : Tagged<String>();
2882 }
static void MoveExternalBackingStoreBytes(ExternalBackingStoreType type, MutablePageMetadata *from, MutablePageMetadata *to, size_t amount)

References v8::internal::DCHECK(), v8::internal::PageMetadata::FromAddress(), v8::internal::PageMetadata::FromHeapObject(), InFromPage(), v8::internal::InstanceTypeChecker::IsExternalString(), v8::internal::MapWord::IsForwardingAddress(), v8::internal::InstanceTypeChecker::IsThinString(), v8::internal::kExternalString, v8::kRelaxedLoad, v8::internal::MutablePageMetadata::MoveExternalBackingStoreBytes(), v8::internal::MapWord::ToForwardingAddress(), and v8::internal::v8_flags.

+ Here is the call graph for this function:

◆ UpdateYoungReferencesInExternalStringTable()

void v8::internal::Heap::UpdateYoungReferencesInExternalStringTable ( ExternalStringTableUpdaterCallback  updater_func)
private

Definition at line 2991 of file heap.cc.

2992  {
2994 }
void UpdateYoungReferences(Heap::ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:2933

References external_string_table_, and v8::internal::Heap::ExternalStringTable::UpdateYoungReferences().

Referenced by v8::internal::MinorMarkSweepCollector::Sweep().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ use_new_space()

bool v8::internal::Heap::use_new_space ( ) const
inline

Definition at line 1699 of file heap.h.

1699  {
1700  DCHECK_IMPLIES(new_space(), !v8_flags.sticky_mark_bits);
1701  return new_space() || v8_flags.sticky_mark_bits;
1702  }

References DCHECK_IMPLIES, and v8::internal::v8_flags.

Referenced by v8::internal::MinorMarkSweepCollector::CollectGarbage(), EnsureSweepingCompleted(), GarbageCollectionPrologue(), MinorMarkSweep(), v8::internal::MarkCompactCollector::Prepare(), v8::internal::ConcurrentMarking::RunMajor(), v8::internal::ConcurrentMarking::RunMinor(), and SelectGarbageCollector().

+ Here is the caller graph for this function:

◆ UsedGlobalHandlesSize()

size_t v8::internal::Heap::UsedGlobalHandlesSize ( )

Definition at line 1022 of file heap.cc.

1022  {
1023  return isolate_->global_handles()->UsedSize() +
1025 }

References v8::internal::Isolate::global_handles(), isolate_, v8::internal::Isolate::traced_handles(), v8::internal::TracedHandles::used_size_bytes(), and v8::internal::GlobalHandles::UsedSize().

+ Here is the call graph for this function:

◆ using_initial_limit()

bool v8::internal::Heap::using_initial_limit ( ) const
inlineprivate

Definition at line 1991 of file heap.h.

1991  {
1992  return using_initial_limit_.load(std::memory_order_relaxed);
1993  }

Referenced by EnsureSweepingCompleted(), IncrementalMarkingLimitReached(), PerformGarbageCollection(), RecomputeLimits(), and ShrinkOldGenerationAllocationLimitIfNotConfigured().

+ Here is the caller graph for this function:

◆ WeakenDescriptorArrays()

void v8::internal::Heap::WeakenDescriptorArrays ( GlobalHandleVector< DescriptorArray strong_descriptor_arrays)

Definition at line 6132 of file heap.cc.

6133  {
6134  if (incremental_marking()->IsMajorMarking()) {
6135  // During incremental/concurrent marking, regular DescriptorArray objects are
6136  // treated with custom weakness. This weakness depends on
6137  // DescriptorArray::raw_gc_state() which is not set up properly upon
6138  // deserialization. The strong arrays are transitioned to weak ones at the
6139  // end of the GC.
6141  std::move(strong_descriptor_arrays));
6142  return;
6143  }
6144 
6145  // No GC is running, weaken the arrays right away.
6147  Tagged<Map> descriptor_array_map =
6148  ReadOnlyRoots(isolate()).descriptor_array_map();
6149  for (auto it = strong_descriptor_arrays.begin();
6150  it != strong_descriptor_arrays.end(); ++it) {
6151  Tagged<DescriptorArray> array = it.raw();
6152  DCHECK(IsStrongDescriptorArray(array));
6153  array->set_map_safe_transition_no_write_barrier(isolate(),
6154  descriptor_array_map);
6155  DCHECK_EQ(array->raw_gc_state(kRelaxedLoad), 0);
6156  }
6157 }
void RecordStrongDescriptorArraysForWeakening(GlobalHandleVector< DescriptorArray > strong_descriptor_arrays)

References v8::internal::GlobalHandleVector< T >::begin(), v8::internal::DCHECK(), DCHECK_EQ, v8::internal::GlobalHandleVector< T >::end(), incremental_marking(), isolate(), v8::kRelaxedLoad, mark_compact_collector(), v8::internal::anonymous_namespace{json-stringifier.cc}::no_gc, ReadOnlyRoots, and v8::internal::MarkCompactCollector::RecordStrongDescriptorArraysForWeakening().

Referenced by v8::internal::LocalHeap::WeakenDescriptorArrays().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ YoungArrayBufferBytes()

size_t v8::internal::Heap::YoungArrayBufferBytes ( )

Definition at line 6959 of file heap.cc.

6959  {
6960  return array_buffer_sweeper()->YoungBytes();
6961 }

References array_buffer_sweeper(), and v8::internal::ArrayBufferSweeper::YoungBytes().

+ Here is the call graph for this function:

◆ YoungGenerationCollector()

static GarbageCollector v8::internal::Heap::YoungGenerationCollector ( )
inlinestatic

Definition at line 368 of file heap.h.

368  {
371  }

References v8::internal::v8_flags.

Referenced by SelectGarbageCollector().

+ Here is the caller graph for this function:

◆ YoungGenerationConsumedBytes()

size_t v8::internal::Heap::YoungGenerationConsumedBytes ( ) const

Definition at line 5402 of file heap.cc.

5402  {
5403  if (!new_space()) {
5404  return 0;
5405  }
5407  if (v8_flags.minor_ms) {
5409  }
5410  // When using Scavenger, memory is compacted. Thus wasted space is always 0.
5411  // The diff between `new_space()->SizeOfObjects()` and
5412  // `new_space()->CurrentCapacitySafe()` is less than one page. Using capacity
5413  // here is also easier for concurrency since this method is reachable from
5414  // background old allocations.
5417 }
V8_EXPORT_PRIVATE size_t YoungGenerationSizeOfObjects() const
Definition: heap.cc:5385
V8_EXPORT_PRIVATE size_t YoungGenerationWastedBytes() const
Definition: heap.cc:5396
size_t CurrentCapacitySafe() const
Definition: new-spaces.h:294

References v8::internal::SemiSpaceNewSpace::CurrentCapacitySafe(), DCHECK_NOT_NULL, new_lo_space(), new_space(), semi_space_new_space(), v8::internal::LargeObjectSpace::SizeOfObjects(), v8::internal::v8_flags, YoungGenerationSizeOfObjects(), and YoungGenerationWastedBytes().

Referenced by AllocationLimitOvershotByLargeMargin().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ YoungGenerationSizeFromOldGenerationSize()

size_t v8::internal::Heap::YoungGenerationSizeFromOldGenerationSize ( size_t  old_generation_size)
static

Definition at line 209 of file heap.cc.

209  {
210  // Compute the semi space size and cap it.
211  bool is_low_memory = old_generation <= Heap::OldGenerationLowMemory();
212  size_t semi_space;
213  if (v8_flags.minor_ms && !is_low_memory) {
214  semi_space = DefaultMaxSemiSpaceSize();
215  } else {
216  size_t ratio = is_low_memory ? OldGenerationToSemiSpaceRatioLowMemory()
218  semi_space = old_generation / ratio;
219  semi_space = std::min({semi_space, DefaultMaxSemiSpaceSize()});
220  semi_space = std::max({semi_space, DefaultMinSemiSpaceSize()});
221  semi_space = RoundUp(semi_space, PageMetadata::kPageSize);
222  }
223  return YoungGenerationSizeFromSemiSpaceSize(semi_space);
224 }
static V8_EXPORT_PRIVATE size_t OldGenerationLowMemory()
Definition: heap.cc:5052
static size_t OldGenerationToSemiSpaceRatio()
Definition: heap.cc:5092
static size_t OldGenerationToSemiSpaceRatioLowMemory()
Definition: heap.cc:5102

References DefaultMaxSemiSpaceSize(), DefaultMinSemiSpaceSize(), v8::internal::MutablePageMetadata::kPageSize, OldGenerationLowMemory(), OldGenerationToSemiSpaceRatio(), OldGenerationToSemiSpaceRatioLowMemory(), RoundUp(), v8::internal::v8_flags, and YoungGenerationSizeFromSemiSpaceSize().

Referenced by GenerationSizesFromHeapSize(), and HeapSizeFromPhysicalMemory().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ YoungGenerationSizeFromSemiSpaceSize()

size_t v8::internal::Heap::YoungGenerationSizeFromSemiSpaceSize ( size_t  semi_space_size)
static

Definition at line 315 of file heap.cc.

315  {
316  return semi_space_size *
318 }

References kNewLargeObjectSpaceToSemiSpaceRatio, and v8::internal::anonymous_namespace{heap.cc}::NumberOfSemiSpaces().

Referenced by AllocatorLimitOnMaxOldGenerationSize(), ConfigureHeap(), MinYoungGenerationSize(), and YoungGenerationSizeFromOldGenerationSize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ YoungGenerationSizeOfObjects()

size_t v8::internal::Heap::YoungGenerationSizeOfObjects ( ) const

Definition at line 5385 of file heap.cc.

5385  {
5388  if (v8_flags.sticky_mark_bits) {
5389  return sticky_space()->young_objects_size() +
5391  }
5393  return new_space()->SizeOfObjects() + new_lo_space()->SizeOfObjects();
5394 }

References DCHECK_NOT_NULL, new_lo_space(), new_space(), v8::internal::LargeObjectSpace::SizeOfObjects(), sticky_space(), v8::internal::v8_flags, and v8::internal::StickySpace::young_objects_size().

Referenced by YoungGenerationConsumedBytes().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ YoungGenerationWastedBytes()

size_t v8::internal::Heap::YoungGenerationWastedBytes ( ) const

Definition at line 5396 of file heap.cc.

5396  {
5398  DCHECK(v8_flags.minor_ms);
5399  return paged_new_space()->paged_space()->Waste();
5400 }

References v8::internal::DCHECK(), DCHECK_NOT_NULL, new_space(), paged_new_space(), v8::internal::PagedNewSpace::paged_space(), v8::internal::v8_flags, and v8::internal::PagedSpaceBase::Waste().

Referenced by YoungGenerationConsumedBytes().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

Friends And Related Function Documentation

◆ ActivateMemoryReducerTask

friend class ActivateMemoryReducerTask
friend

Definition at line 2483 of file heap.h.

◆ AlwaysAllocateScope

friend class AlwaysAllocateScope
friend

Definition at line 2484 of file heap.h.

◆ ArrayBufferCollector

friend class ArrayBufferCollector
friend

Definition at line 2485 of file heap.h.

◆ ArrayBufferSweeper

friend class ArrayBufferSweeper
friend

Definition at line 2486 of file heap.h.

Referenced by SetUpSpaces().

◆ ConcurrentMarking

friend class ConcurrentMarking
friend

Definition at line 2488 of file heap.h.

Referenced by SetUp().

◆ ConservativePinningScope

friend class ConservativePinningScope
friend

Definition at line 2487 of file heap.h.

◆ ConservativeTracedHandlesMarkingVisitor

Definition at line 2489 of file heap.h.

◆ CppHeap

friend class CppHeap
friend

Definition at line 2490 of file heap.h.

◆ Deserializer

template<typename IsolateT >
friend class Deserializer
friend

Definition at line 2541 of file heap.h.

◆ DisableConservativeStackScanningScopeForTesting

Definition at line 2524 of file heap.h.

◆ EmbedderStackStateScope

friend class EmbedderStackStateScope
friend

Definition at line 2491 of file heap.h.

◆ EvacuateVisitorBase

friend class EvacuateVisitorBase
friend

Definition at line 2492 of file heap.h.

◆ Factory

friend class Factory
friend

Definition at line 2538 of file heap.h.

◆ GCCallbacksScope

friend class GCCallbacksScope
friend

Definition at line 2493 of file heap.h.

◆ GCTracer

friend class GCTracer
friend

Definition at line 2494 of file heap.h.

Referenced by SetUpSpaces().

◆ heap::HeapTester

friend class heap::HeapTester
friend

Definition at line 2547 of file heap.h.

◆ heap::TestMemoryAllocatorScope

friend class heap::TestMemoryAllocatorScope
friend

Definition at line 2535 of file heap.h.

◆ HeapAllocator

friend class HeapAllocator
friend

Definition at line 2495 of file heap.h.

◆ HeapInternalsBase

friend class HeapInternalsBase
friend

Definition at line 2550 of file heap.h.

◆ HeapObjectIterator

friend class HeapObjectIterator
friend

Definition at line 2496 of file heap.h.

◆ HeapVerifier

friend class HeapVerifier
friend

Definition at line 2497 of file heap.h.

◆ IgnoreLocalGCRequests

friend class IgnoreLocalGCRequests
friend

Definition at line 2498 of file heap.h.

◆ IncrementalMarking

friend class IncrementalMarking
friend

Definition at line 2499 of file heap.h.

Referenced by SetUp().

◆ IncrementalMarkingJob

friend class IncrementalMarkingJob
friend

Definition at line 2500 of file heap.h.

◆ Isolate

friend class Isolate
friend

Definition at line 2544 of file heap.h.

◆ LargeObjectSpace

friend class LargeObjectSpace
friend

Definition at line 2501 of file heap.h.

◆ LocalFactory

friend class LocalFactory
friend

Definition at line 2539 of file heap.h.

◆ LocalHeap

friend class LocalHeap
friend

Definition at line 2502 of file heap.h.

◆ MarkCompactCollector

friend class MarkCompactCollector
friend

Definition at line 2507 of file heap.h.

Referenced by SetUp().

◆ MarkingBarrier

friend class MarkingBarrier
friend

Definition at line 2503 of file heap.h.

◆ MarkingVisitorBase

template<typename ConcreteVisitor >
friend class MarkingVisitorBase
friend

Definition at line 2506 of file heap.h.

◆ MemoryBalancer

friend class MemoryBalancer
friend

Definition at line 2508 of file heap.h.

Referenced by SetUpSpaces().

◆ MinorGCJob

friend class MinorGCJob
friend

Definition at line 2509 of file heap.h.

Referenced by SetUpSpaces().

◆ MinorGCTaskObserver

friend class MinorGCTaskObserver
friend

Definition at line 2510 of file heap.h.

◆ MinorMarkSweepCollector

friend class MinorMarkSweepCollector
friend

Definition at line 2511 of file heap.h.

Referenced by SetUp().

◆ MinorMSIncrementalMarkingTaskObserver

friend class MinorMSIncrementalMarkingTaskObserver
friend

Definition at line 2512 of file heap.h.

◆ NewLargeObjectSpace

friend class NewLargeObjectSpace
friend

Definition at line 2513 of file heap.h.

◆ NewSpace

friend class NewSpace
friend

Definition at line 2514 of file heap.h.

◆ ObjectStatsCollector

friend class ObjectStatsCollector
friend

Definition at line 2515 of file heap.h.

◆ OldLargeObjectSpace

friend class OldLargeObjectSpace
friend

Definition at line 2504 of file heap.h.

◆ PagedNewSpaceAllocatorPolicy

friend class PagedNewSpaceAllocatorPolicy
friend

Definition at line 2517 of file heap.h.

◆ PagedSpaceAllocatorPolicy

friend class PagedSpaceAllocatorPolicy
friend

Definition at line 2518 of file heap.h.

◆ PagedSpaceBase

friend class PagedSpaceBase
friend

Definition at line 2519 of file heap.h.

◆ PagedSpaceForNewSpace

friend class PagedSpaceForNewSpace
friend

Definition at line 2520 of file heap.h.

◆ PageMetadata

friend class PageMetadata
friend

Definition at line 2516 of file heap.h.

◆ PauseAllocationObserversScope

friend class PauseAllocationObserversScope
friend

Definition at line 2521 of file heap.h.

◆ PretenuringHandler

friend class PretenuringHandler
friend

Definition at line 2522 of file heap.h.

◆ ReadOnlyRoots

◆ Scavenger

friend class Scavenger
friend

Definition at line 2525 of file heap.h.

◆ ScavengerCollector

friend class ScavengerCollector
friend

Definition at line 2526 of file heap.h.

Referenced by SetUp().

◆ ScheduleMinorGCTaskObserver

friend class ScheduleMinorGCTaskObserver
friend

Definition at line 2527 of file heap.h.

◆ SemiSpaceNewSpace

friend class SemiSpaceNewSpace
friend

Definition at line 2528 of file heap.h.

◆ SemiSpaceNewSpaceAllocatorPolicy

friend class SemiSpaceNewSpaceAllocatorPolicy
friend

Definition at line 2529 of file heap.h.

◆ Space

friend class Space
friend

Definition at line 2531 of file heap.h.

◆ SpaceWithLinearArea

friend class SpaceWithLinearArea
friend

Definition at line 2532 of file heap.h.

◆ StressConcurrentAllocationObserver

Definition at line 2530 of file heap.h.

Referenced by NotifyDeserializationComplete().

◆ Sweeper

friend class Sweeper
friend

Definition at line 2533 of file heap.h.

Referenced by SetUp().

◆ UnifiedHeapMarkingState

friend class UnifiedHeapMarkingState
friend

Definition at line 2534 of file heap.h.

Member Data Documentation

◆ allocation_sites_list_

Tagged<UnionOf<Smi, Undefined, AllocationSiteWithWeakNext> > v8::internal::Heap::allocation_sites_list_ = Smi::zero()
private

Definition at line 2301 of file heap.h.

◆ allocation_tracker_for_debugging_

std::unique_ptr<AllocationTrackerForDebugging> v8::internal::Heap::allocation_tracker_for_debugging_
private

Definition at line 2346 of file heap.h.

Referenced by SetUpSpaces(), and TearDown().

◆ allocation_trackers_

std::vector<HeapObjectAllocationTracker*> v8::internal::Heap::allocation_trackers_
private

◆ allocation_type_for_in_place_internalizable_strings_

const AllocationType v8::internal::Heap::allocation_type_for_in_place_internalizable_strings_
private

Definition at line 2431 of file heap.h.

◆ always_allocate_scope_count_

std::atomic<size_t> v8::internal::Heap::always_allocate_scope_count_ {0}
private

◆ array_buffer_sweeper_

std::unique_ptr<ArrayBufferSweeper> v8::internal::Heap::array_buffer_sweeper_
private

◆ backing_store_bytes_

std::atomic<uint64_t> v8::internal::Heap::backing_store_bytes_ {0}
private

◆ code_lo_space_

◆ code_range_

std::unique_ptr<CodeRange> v8::internal::Heap::code_range_
private

◆ code_range_size_

size_t v8::internal::Heap::code_range_size_ = 0
private

Definition at line 2143 of file heap.h.

Referenced by ConfigureHeap(), and SetUp().

◆ code_space_

CodeSpace* v8::internal::Heap::code_space_ = nullptr
private

◆ collection_barrier_

◆ concurrent_marking_

std::unique_ptr<ConcurrentMarking> v8::internal::Heap::concurrent_marking_
private

Definition at line 2338 of file heap.h.

Referenced by SetUp(), and TearDown().

◆ configured_

bool v8::internal::Heap::configured_ = false
private

Definition at line 2413 of file heap.h.

Referenced by ConfigureHeap(), ResetOldGenerationAndGlobalAllocationLimit(), and SetUp().

◆ consecutive_ineffective_mark_compacts_

int v8::internal::Heap::consecutive_ineffective_mark_compacts_ = 0
private

Definition at line 2279 of file heap.h.

Referenced by CheckIneffectiveMarkCompact().

◆ contexts_disposed_

int v8::internal::Heap::contexts_disposed_ = 0
private

Definition at line 2204 of file heap.h.

Referenced by MarkCompact(), and NotifyContextDisposed().

◆ cpp_heap_

◆ current_gc_callback_flags_

◆ current_gc_flags_

◆ current_or_last_garbage_collector_

GarbageCollector v8::internal::Heap::current_or_last_garbage_collector_
private
Initial value:

Definition at line 2426 of file heap.h.

Referenced by CollectGarbage().

◆ dead_object_stats_

std::unique_ptr<ObjectStats> v8::internal::Heap::dead_object_stats_
private

Definition at line 2342 of file heap.h.

Referenced by CreateObjectStats(), SetUpSpaces(), and TearDown().

◆ deferred_counters_

base::SmallVector<v8::Isolate::UseCounterFeature, 8> v8::internal::Heap::deferred_counters_
private

Definition at line 2311 of file heap.h.

Referenced by IncrementDeferredCounts(), and ReportStatisticsAfterGC().

◆ delay_sweeper_tasks_for_testing_

bool v8::internal::Heap::delay_sweeper_tasks_for_testing_ = false
private

Definition at line 2450 of file heap.h.

Referenced by FinishSweepingIfOutOfWork().

◆ deserialization_complete_

bool v8::internal::Heap::deserialization_complete_ = false
private

◆ dirty_js_finalization_registries_list_

Tagged<Object> v8::internal::Heap::dirty_js_finalization_registries_list_ = Smi::zero()
private

Definition at line 2302 of file heap.h.

◆ dirty_js_finalization_registries_list_tail_

Tagged<Object> v8::internal::Heap::dirty_js_finalization_registries_list_tail_ = Smi::zero()
private

Definition at line 2304 of file heap.h.

◆ embedder_roots_handler_

EmbedderRootsHandler* v8::internal::Heap::embedder_roots_handler_
private
Initial value:
=
nullptr

Definition at line 2371 of file heap.h.

Referenced by GetEmbedderRootsHandler(), SetEmbedderRootsHandler(), and TearDown().

◆ embedder_size_at_last_gc_

size_t v8::internal::Heap::embedder_size_at_last_gc_ = 0
private

◆ embedder_stack_state_

◆ embedder_stack_state_origin_

std::optional<EmbedderStackStateOrigin> v8::internal::Heap::embedder_stack_state_origin_
private

◆ ephemeron_remembered_set_

std::unique_ptr<EphemeronRememberedSet> v8::internal::Heap::ephemeron_remembered_set_
private

Definition at line 2347 of file heap.h.

Referenced by SetUp(), and TearDown().

◆ external_memory_

◆ external_memory_callback_

GetExternallyAllocatedMemoryInBytesCallback v8::internal::Heap::external_memory_callback_
private

Definition at line 2309 of file heap.h.

Referenced by PrintShortHeapStatistics().

◆ external_string_table_

◆ force_gc_on_next_allocation_

bool v8::internal::Heap::force_gc_on_next_allocation_ = false
private

Definition at line 2449 of file heap.h.

Referenced by CanExpandOldGeneration(), and GarbageCollectionPrologue().

◆ force_oom_

bool v8::internal::Heap::force_oom_ = false
private

Definition at line 2448 of file heap.h.

Referenced by CanExpandOldGeneration().

◆ gc_callbacks_depth_

◆ gc_count_

unsigned int v8::internal::Heap::gc_count_ = 0
private

◆ gc_epilogue_callbacks_

GCCallbacks v8::internal::Heap::gc_epilogue_callbacks_
private

◆ gc_prologue_callbacks_

GCCallbacks v8::internal::Heap::gc_prologue_callbacks_
private

◆ gc_state_

std::atomic<HeapState> v8::internal::Heap::gc_state_ {NOT_IN_GC}
private

Definition at line 2258 of file heap.h.

Referenced by SetGCState().

◆ global_allocation_limit_

std::atomic<size_t> v8::internal::Heap::global_allocation_limit_ {0}
private

Definition at line 2293 of file heap.h.

Referenced by SetOldGenerationAndGlobalAllocationLimit().

◆ heap_allocator_

HeapAllocator* v8::internal::Heap::heap_allocator_ = nullptr
private

◆ heap_expansion_mutex_

base::Mutex v8::internal::Heap::heap_expansion_mutex_
private

Definition at line 2380 of file heap.h.

◆ heap_profiler_

std::unique_ptr<HeapProfiler> v8::internal::Heap::heap_profiler_
private

Definition at line 2348 of file heap.h.

Referenced by ConfigureHeap(), and TearDown().

◆ ignore_local_gc_requests_depth_

int v8::internal::Heap::ignore_local_gc_requests_depth_ = 0
private

◆ incremental_marking_

std::unique_ptr<IncrementalMarking> v8::internal::Heap::incremental_marking_
private

Definition at line 2337 of file heap.h.

Referenced by PerformGarbageCollection(), SetUp(), and TearDown().

◆ initial_max_old_generation_size_

size_t v8::internal::Heap::initial_max_old_generation_size_ = 0
private

◆ initial_max_old_generation_size_threshold_

size_t v8::internal::Heap::initial_max_old_generation_size_threshold_ = 0
private

Definition at line 2159 of file heap.h.

Referenced by AutomaticallyRestoreInitialHeapLimit(), and CollectGarbage().

◆ initial_old_generation_size_

size_t v8::internal::Heap::initial_old_generation_size_ = 0
private

◆ initial_semispace_size_

size_t v8::internal::Heap::initial_semispace_size_ = 0
private

Definition at line 2146 of file heap.h.

Referenced by ConfigureHeap(), and SetUpSpaces().

◆ initial_size_overwritten_

bool v8::internal::Heap::initial_size_overwritten_ = false
private

◆ inline_allocation_enabled_

bool v8::internal::Heap::inline_allocation_enabled_ = true
private

Definition at line 2443 of file heap.h.

Referenced by DisableInlineAllocation(), and EnableInlineAllocation().

◆ is_current_gc_for_heap_profiler_

bool v8::internal::Heap::is_current_gc_for_heap_profiler_ = false
private

Definition at line 2425 of file heap.h.

Referenced by CollectGarbage(), and GarbageCollectionPrologue().

◆ is_current_gc_forced_

bool v8::internal::Heap::is_current_gc_forced_ = false
private

Definition at line 2424 of file heap.h.

Referenced by CollectGarbage(), and GarbageCollectionPrologue().

◆ is_finalization_registry_cleanup_task_posted_

bool v8::internal::Heap::is_finalization_registry_cleanup_task_posted_ = false
private

Definition at line 2454 of file heap.h.

Referenced by PostFinalizationRegistryCleanupTaskIfNeeded().

◆ is_full_gc_during_loading_

bool v8::internal::Heap::is_full_gc_during_loading_ = false
private

Definition at line 2476 of file heap.h.

Referenced by CollectGarbage(), and StartIncrementalMarking().

◆ isolate_

◆ kInitialEvalCacheSize

const int v8::internal::Heap::kInitialEvalCacheSize = 64
staticprivate

Definition at line 1760 of file heap.h.

◆ kLoadTimeNotLoading

constexpr double v8::internal::Heap::kLoadTimeNotLoading = -1.0
staticconstexprprivate

Definition at line 2467 of file heap.h.

Referenced by NotifyLoadingEnded(), NotifyLoadingStarted(), and ShouldOptimizeForLoadTime().

◆ kMaxLoadTimeMs

const int v8::internal::Heap::kMaxLoadTimeMs = 7000
staticprivate

Definition at line 1977 of file heap.h.

Referenced by ShouldOptimizeForLoadTime().

◆ kMaxMarkCompactsInIdleRound

const int v8::internal::Heap::kMaxMarkCompactsInIdleRound = 7
staticprivate

Definition at line 1768 of file heap.h.

◆ kMinObjectSizeInTaggedWords

◆ kMmapRegionMask

const uintptr_t v8::internal::Heap::kMmapRegionMask = 0xFFFFFFFFu
staticprivate

Definition at line 2281 of file heap.h.

Referenced by SetUp().

◆ kNewLargeObjectSpaceToSemiSpaceRatio

constexpr size_t v8::internal::Heap::kNewLargeObjectSpaceToSemiSpaceRatio = 1
staticconstexpr

◆ kOldSurvivalRateLowThreshold

const int v8::internal::Heap::kOldSurvivalRateLowThreshold = 10
staticprivate

Definition at line 1766 of file heap.h.

Referenced by EvaluateOldSpaceLocalPretenuring().

◆ kPhysicalMemoryToOldGenerationRatio

constexpr size_t v8::internal::Heap::kPhysicalMemoryToOldGenerationRatio = 4
staticconstexpr

Definition at line 319 of file heap.h.

Referenced by HeapSizeFromPhysicalMemory().

◆ kPointerMultiplier

const int v8::internal::Heap::kPointerMultiplier = kTaggedSize / 4
static

◆ kRememberedUnmappedPages

const int v8::internal::Heap::kRememberedUnmappedPages = 128
staticprivate

Definition at line 1762 of file heap.h.

Referenced by RememberUnmappedPage().

◆ kRetainMapEntrySize

constexpr int v8::internal::Heap::kRetainMapEntrySize = 2
staticconstexprprivate

Definition at line 1880 of file heap.h.

Referenced by AddRetainedMaps(), and CompactRetainedMaps().

◆ kStacktraceBufferSize

const int v8::internal::Heap::kStacktraceBufferSize = 512
static

Definition at line 323 of file heap.h.

◆ kTraceRingBufferSize

const int v8::internal::Heap::kTraceRingBufferSize = 512
static

Definition at line 322 of file heap.h.

Referenced by AddToRingBuffer(), and GetFromRingBuffer().

◆ kYoungSurvivalRateAllowedDeviation

const int v8::internal::Heap::kYoungSurvivalRateAllowedDeviation = 15
staticprivate

Definition at line 1765 of file heap.h.

◆ kYoungSurvivalRateHighThreshold

const int v8::internal::Heap::kYoungSurvivalRateHighThreshold = 90
staticprivate

Definition at line 1764 of file heap.h.

◆ last_gc_time_

double v8::internal::Heap::last_gc_time_ = 0.0
private

Definition at line 2327 of file heap.h.

Referenced by GarbageCollectionEpilogue().

◆ live_object_stats_

std::unique_ptr<ObjectStats> v8::internal::Heap::live_object_stats_
private

◆ lo_space_

◆ load_start_time_ms_

std::atomic<double> v8::internal::Heap::load_start_time_ms_ {kLoadTimeNotLoading}
private

Definition at line 2470 of file heap.h.

Referenced by NotifyLoadingEnded(), NotifyLoadingStarted(), and ShouldOptimizeForLoadTime().

◆ main_thread_local_heap_

LocalHeap* v8::internal::Heap::main_thread_local_heap_ = nullptr
private

◆ mark_compact_collector_

std::unique_ptr<MarkCompactCollector> v8::internal::Heap::mark_compact_collector_
private

Definition at line 2331 of file heap.h.

Referenced by SetUp(), and TearDown().

◆ marking_state_

MarkingState v8::internal::Heap::marking_state_
private

Definition at line 2456 of file heap.h.

◆ max_global_memory_size_

◆ max_marking_limit_reached_

std::atomic<double> v8::internal::Heap::max_marking_limit_reached_ = 0.0
private

Definition at line 2269 of file heap.h.

Referenced by IncrementalMarkingLimitReached(), and PrintMaxMarkingLimitReached().

◆ max_old_generation_size_

std::atomic<size_t> v8::internal::Heap::max_old_generation_size_ {0}
private

◆ max_regular_code_object_size_

int v8::internal::Heap::max_regular_code_object_size_ = 0
private

Definition at line 2441 of file heap.h.

Referenced by Heap(), and MaxRegularHeapObjectSize().

◆ max_semi_space_size_

size_t v8::internal::Heap::max_semi_space_size_ = 0
private

Definition at line 2144 of file heap.h.

Referenced by ConfigureHeap(), MaxReserved(), and SetUpSpaces().

◆ maximum_committed_

size_t v8::internal::Heap::maximum_committed_ = 0
private

Definition at line 2180 of file heap.h.

Referenced by UpdateMaximumCommitted().

◆ mb_

std::unique_ptr<MemoryBalancer> v8::internal::Heap::mb_
private

◆ memory_allocator_

std::unique_ptr<MemoryAllocator> v8::internal::Heap::memory_allocator_
private

Definition at line 2336 of file heap.h.

Referenced by GarbageCollectionEpilogueInSafepoint(), SetUp(), and TearDown().

◆ memory_measurement_

std::unique_ptr<MemoryMeasurement> v8::internal::Heap::memory_measurement_
private

Definition at line 2339 of file heap.h.

Referenced by MeasureMemory(), SetUpSpaces(), and TearDown().

◆ memory_pressure_level_

std::atomic<v8::MemoryPressureLevel> v8::internal::Heap::memory_pressure_level_
private

◆ memory_reducer_

std::unique_ptr<MemoryReducer> v8::internal::Heap::memory_reducer_
private

◆ min_global_memory_size_

size_t v8::internal::Heap::min_global_memory_size_ = 0
private

Definition at line 2155 of file heap.h.

Referenced by ConfigureHeap(), and EnsureMinimumRemainingAllocationLimit().

◆ min_old_generation_size_

size_t v8::internal::Heap::min_old_generation_size_ = 0
private

Definition at line 2149 of file heap.h.

Referenced by ConfigureHeap().

◆ min_semi_space_size_

size_t v8::internal::Heap::min_semi_space_size_ = 0
private

Definition at line 2145 of file heap.h.

Referenced by ConfigureHeap(), and SetUpSpaces().

◆ minor_gc_job_

std::unique_ptr<MinorGCJob> v8::internal::Heap::minor_gc_job_
private

Definition at line 2343 of file heap.h.

Referenced by SetUpSpaces(), and TearDown().

◆ minor_mark_sweep_collector_

std::unique_ptr<MinorMarkSweepCollector> v8::internal::Heap::minor_mark_sweep_collector_
private

Definition at line 2332 of file heap.h.

Referenced by MinorMarkSweep(), SetUp(), and TearDown().

◆ mmap_region_base_

uintptr_t v8::internal::Heap::mmap_region_base_ = 0
private

Definition at line 2282 of file heap.h.

Referenced by SetUp().

◆ ms_count_

unsigned int v8::internal::Heap::ms_count_ = 0
private

◆ native_contexts_list_

std::atomic<Address> v8::internal::Heap::native_contexts_list_
private

Definition at line 2299 of file heap.h.

◆ near_heap_limit_callbacks_

std::vector<std::pair<v8::NearHeapLimitCallback, void*> > v8::internal::Heap::near_heap_limit_callbacks_
private

◆ need_to_remove_stress_concurrent_allocation_observer_

bool v8::internal::Heap::need_to_remove_stress_concurrent_allocation_observer_ = false
private

◆ new_lo_space_

◆ new_space_

◆ new_space_allocation_counter_

size_t v8::internal::Heap::new_space_allocation_counter_ = 0
private

Definition at line 2387 of file heap.h.

Referenced by GarbageCollectionPrologueInSafepoint(), and NewSpaceAllocationCounter().

◆ new_space_surviving_object_size_

size_t v8::internal::Heap::new_space_surviving_object_size_ = 0
private

Definition at line 2316 of file heap.h.

Referenced by GarbageCollectionPrologue(), and UpdateSurvivalStatistics().

◆ new_space_surviving_rate_

double v8::internal::Heap::new_space_surviving_rate_ = 0.0
private

Definition at line 2318 of file heap.h.

Referenced by v8::internal::GCTracer::PrintNVP(), and UpdateSurvivalStatistics().

◆ nodes_copied_in_new_space_

int v8::internal::Heap::nodes_copied_in_new_space_ = 0
private

Definition at line 2320 of file heap.h.

Referenced by GarbageCollectionPrologue(), and v8::internal::GCTracer::PrintNVP().

◆ nodes_died_in_new_space_

int v8::internal::Heap::nodes_died_in_new_space_ = 0
private

Definition at line 2319 of file heap.h.

Referenced by GarbageCollectionPrologue(), and v8::internal::GCTracer::PrintNVP().

◆ nodes_promoted_

int v8::internal::Heap::nodes_promoted_ = 0
private

Definition at line 2321 of file heap.h.

Referenced by GarbageCollectionPrologue(), and v8::internal::GCTracer::PrintNVP().

◆ non_atomic_marking_state_

NonAtomicMarkingState v8::internal::Heap::non_atomic_marking_state_
private

Definition at line 2457 of file heap.h.

◆ old_generation_allocation_counter_at_last_gc_

size_t v8::internal::Heap::old_generation_allocation_counter_at_last_gc_ = 0
private

Definition at line 2392 of file heap.h.

Referenced by MarkCompact().

◆ old_generation_allocation_limit_

std::atomic<size_t> v8::internal::Heap::old_generation_allocation_limit_ {0}
private

◆ old_generation_capacity_after_bootstrap_

size_t v8::internal::Heap::old_generation_capacity_after_bootstrap_ = 0
private

Definition at line 2181 of file heap.h.

Referenced by NotifyBootstrapComplete(), and NotifyOldGenerationExpansion().

◆ old_generation_size_at_last_gc_

size_t v8::internal::Heap::old_generation_size_at_last_gc_ {0}
private

◆ old_generation_wasted_at_last_gc_

size_t v8::internal::Heap::old_generation_wasted_at_last_gc_ {0}
private

◆ old_space_

◆ owning_cpp_heap_

std::unique_ptr<CppHeap> v8::internal::Heap::owning_cpp_heap_
private

Definition at line 2365 of file heap.h.

Referenced by AttachCppHeap(), ConfigureHeap(), and TearDown().

◆ pause_allocation_observers_depth_

int v8::internal::Heap::pause_allocation_observers_depth_ = 0
private

◆ preconfigured_old_generation_size_

bool v8::internal::Heap::preconfigured_old_generation_size_ = false
private

Definition at line 2178 of file heap.h.

Referenced by ConfigureHeap(), and NotifyContextDisposed().

◆ pretenuring_handler_

PretenuringHandler v8::internal::Heap::pretenuring_handler_
private

Definition at line 2459 of file heap.h.

Referenced by PerformGarbageCollection(), and TearDown().

◆ previous_new_space_surviving_object_size_

size_t v8::internal::Heap::previous_new_space_surviving_object_size_ = 0
private

Definition at line 2317 of file heap.h.

Referenced by GarbageCollectionPrologue(), and UpdateSurvivalStatistics().

◆ promoted_objects_size_

size_t v8::internal::Heap::promoted_objects_size_ = 0
private

Definition at line 2313 of file heap.h.

Referenced by GarbageCollectionPrologue(), MarkCompact(), and UpdateSurvivalStatistics().

◆ promotion_rate_

double v8::internal::Heap::promotion_rate_ = 0.0
private

Definition at line 2315 of file heap.h.

Referenced by v8::internal::GCTracer::PrintNVP(), and UpdateSurvivalStatistics().

◆ promotion_ratio_

double v8::internal::Heap::promotion_ratio_ = 0.0
private

Definition at line 2314 of file heap.h.

Referenced by v8::internal::GCTracer::PrintNVP(), and UpdateSurvivalStatistics().

◆ read_only_space_

◆ remembered_unmapped_pages_

Address v8::internal::Heap::remembered_unmapped_pages_[kRememberedUnmappedPages]
private

Definition at line 2286 of file heap.h.

Referenced by RememberUnmappedPage().

◆ remembered_unmapped_pages_index_

int v8::internal::Heap::remembered_unmapped_pages_index_ = 0
private

Definition at line 2285 of file heap.h.

Referenced by RememberUnmappedPage().

◆ resize_new_space_mode_

ResizeNewSpaceMode v8::internal::Heap::resize_new_space_mode_ = ResizeNewSpaceMode::kNone
private

Definition at line 2462 of file heap.h.

Referenced by CollectGarbage(), ResizeNewSpace(), and StartResizeNewSpace().

◆ ring_buffer_end_

size_t v8::internal::Heap::ring_buffer_end_ = 0
private

Definition at line 2409 of file heap.h.

Referenced by AddToRingBuffer(), and GetFromRingBuffer().

◆ ring_buffer_full_

bool v8::internal::Heap::ring_buffer_full_ = false
private

Definition at line 2408 of file heap.h.

Referenced by AddToRingBuffer(), and GetFromRingBuffer().

◆ safepoint_

std::unique_ptr<IsolateSafepoint> v8::internal::Heap::safepoint_
private

Definition at line 2422 of file heap.h.

Referenced by IterateRoots().

◆ scavenger_collector_

std::unique_ptr<ScavengerCollector> v8::internal::Heap::scavenger_collector_
private

Definition at line 2333 of file heap.h.

Referenced by EnsureQuarantinedPagesSweepingCompleted(), Scavenge(), SetUp(), and TearDown().

◆ selective_stack_scan_start_address_

std::optional<const void*> v8::internal::Heap::selective_stack_scan_start_address_
private

◆ shared_allocation_space_

PagedSpace* v8::internal::Heap::shared_allocation_space_ = nullptr
private

Definition at line 2223 of file heap.h.

Referenced by SetUpSpaces(), and SharedHeapContains().

◆ shared_lo_allocation_space_

OldLargeObjectSpace* v8::internal::Heap::shared_lo_allocation_space_ = nullptr
private

Definition at line 2224 of file heap.h.

Referenced by SetUpSpaces(), and SharedHeapContains().

◆ shared_lo_space_

SharedLargeObjectSpace* v8::internal::Heap::shared_lo_space_ = nullptr
private

◆ shared_space_

SharedSpace* v8::internal::Heap::shared_space_ = nullptr
private

Definition at line 2210 of file heap.h.

Referenced by Contains(), InSpace(), InSpaceSlow(), and SetUpSpaces().

◆ shared_trusted_allocation_space_

SharedTrustedSpace* v8::internal::Heap::shared_trusted_allocation_space_ = nullptr
private

Definition at line 2225 of file heap.h.

Referenced by SetUpSpaces(), and SharedHeapContains().

◆ shared_trusted_lo_allocation_space_

SharedTrustedLargeObjectSpace* v8::internal::Heap::shared_trusted_lo_allocation_space_ = nullptr
private

Definition at line 2226 of file heap.h.

Referenced by SetUpSpaces(), and SharedHeapContains().

◆ shared_trusted_lo_space_

SharedTrustedLargeObjectSpace* v8::internal::Heap::shared_trusted_lo_space_ = nullptr
private

Definition at line 2219 of file heap.h.

Referenced by Contains(), InSpace(), InSpaceSlow(), and SetUpSpaces().

◆ shared_trusted_space_

SharedTrustedSpace* v8::internal::Heap::shared_trusted_space_ = nullptr
private

Definition at line 2217 of file heap.h.

Referenced by Contains(), InSpace(), InSpaceSlow(), and SetUpSpaces().

◆ space_

std::unique_ptr<Space> v8::internal::Heap::space_[LAST_SPACE+1]
private

Definition at line 2229 of file heap.h.

Referenced by paged_space(), SetUpFromReadOnlyHeap(), SetUpSpaces(), space(), and TearDown().

◆ stress_concurrent_allocation_observer_

std::unique_ptr<AllocationObserver> v8::internal::Heap::stress_concurrent_allocation_observer_
private

Definition at line 2344 of file heap.h.

Referenced by NotifyDeserializationComplete(), and TearDown().

◆ stress_marking_percentage_

int v8::internal::Heap::stress_marking_percentage_ = 0
private

◆ stress_scavenge_observer_

StressScavengeObserver* v8::internal::Heap::stress_scavenge_observer_ = nullptr
private

Definition at line 2265 of file heap.h.

Referenced by HandleGCRequest(), PrintMaxNewSpaceSizeReached(), SetUpSpaces(), and TearDown().

◆ strong_roots_head_

StrongRootsEntry* v8::internal::Heap::strong_roots_head_ = nullptr
private

Definition at line 2377 of file heap.h.

Referenced by IterateRoots(), RegisterStrongRoots(), TearDown(), and UnregisterStrongRoots().

◆ strong_roots_mutex_

base::Mutex v8::internal::Heap::strong_roots_mutex_
private

Definition at line 2378 of file heap.h.

Referenced by RegisterStrongRoots(), and UnregisterStrongRoots().

◆ survived_since_last_expansion_

size_t v8::internal::Heap::survived_since_last_expansion_ = 0
private

Definition at line 2190 of file heap.h.

Referenced by ShouldResizeNewSpace().

◆ sweeper_

std::unique_ptr<Sweeper> v8::internal::Heap::sweeper_
private

Definition at line 2330 of file heap.h.

Referenced by EnsureSweepingCompleted(), EnsureYoungSweepingCompleted(), SetUp(), and TearDown().

◆ task_runner_

std::shared_ptr<v8::TaskRunner> v8::internal::Heap::task_runner_
private

◆ total_gc_time_ms_

base::TimeDelta v8::internal::Heap::total_gc_time_ms_
private

Definition at line 2324 of file heap.h.

Referenced by PrintShortHeapStatistics(), and UpdateTotalGCTime().

◆ trace_ring_buffer_

char v8::internal::Heap::trace_ring_buffer_[kTraceRingBufferSize]
private

Definition at line 2403 of file heap.h.

Referenced by AddToRingBuffer(), and GetFromRingBuffer().

◆ tracer_

std::unique_ptr<GCTracer> v8::internal::Heap::tracer_
private

Definition at line 2329 of file heap.h.

Referenced by SetUpSpaces(), ShouldResizeNewSpace(), and TearDown().

◆ trusted_lo_space_

◆ trusted_space_

TrustedSpace* v8::internal::Heap::trusted_space_ = nullptr
private

◆ update_allocation_limits_after_loading_

bool v8::internal::Heap::update_allocation_limits_after_loading_ = false
private

◆ using_initial_limit_

std::atomic<bool> v8::internal::Heap::using_initial_limit_ = true
private

Definition at line 2170 of file heap.h.


The documentation for this class was generated from the following files: